File: | build/gcc/cselib.cc |
Warning: | line 2480, column 7 Value stored to 'l' is never read |
Press '?' to see keyboard shortcuts
Keyboard shortcuts:
1 | /* Common subexpression elimination library for GNU compiler. |
2 | Copyright (C) 1987-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "target.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "df.h" |
28 | #include "memmodel.h" |
29 | #include "tm_p.h" |
30 | #include "regs.h" |
31 | #include "emit-rtl.h" |
32 | #include "dumpfile.h" |
33 | #include "cselib.h" |
34 | #include "function-abi.h" |
35 | #include "alias.h" |
36 | |
37 | /* A list of cselib_val structures. */ |
38 | struct elt_list |
39 | { |
40 | struct elt_list *next; |
41 | cselib_val *elt; |
42 | }; |
43 | |
44 | static bool cselib_record_memory; |
45 | static bool cselib_preserve_constants; |
46 | static bool cselib_any_perm_equivs; |
47 | static inline void promote_debug_loc (struct elt_loc_list *l); |
48 | static struct elt_list *new_elt_list (struct elt_list *, cselib_val *); |
49 | static void new_elt_loc_list (cselib_val *, rtx); |
50 | static void unchain_one_value (cselib_val *); |
51 | static void unchain_one_elt_list (struct elt_list **); |
52 | static void unchain_one_elt_loc_list (struct elt_loc_list **); |
53 | static void remove_useless_values (void); |
54 | static unsigned int cselib_hash_rtx (rtx, int, machine_mode); |
55 | static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx); |
56 | static void add_mem_for_addr (cselib_val *, cselib_val *, rtx); |
57 | static cselib_val *cselib_lookup_mem (rtx, int); |
58 | static void cselib_invalidate_regno (unsigned int, machine_mode); |
59 | static void cselib_invalidate_mem (rtx); |
60 | static void cselib_record_set (rtx, cselib_val *, cselib_val *); |
61 | static void cselib_record_sets (rtx_insn *); |
62 | static rtx autoinc_split (rtx, rtx *, machine_mode); |
63 | |
64 | #define PRESERVED_VALUE_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 64, __FUNCTION__); _rtx; })->unchanging) \ |
65 | (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (( (enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 65, __FUNCTION__); _rtx; })->unchanging) |
66 | |
67 | #define SP_BASED_VALUE_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_BASED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 67, __FUNCTION__); _rtx; })->jump) \ |
68 | (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (( (enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_BASED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 68, __FUNCTION__); _rtx; })->jump) |
69 | |
70 | #define SP_DERIVED_VALUE_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 70, __FUNCTION__); _rtx; })->call) \ |
71 | (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (( (enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 71, __FUNCTION__); _rtx; })->call) |
72 | |
73 | struct expand_value_data |
74 | { |
75 | bitmap regs_active; |
76 | cselib_expand_callback callback; |
77 | void *callback_arg; |
78 | bool dummy; |
79 | }; |
80 | |
81 | static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int); |
82 | |
83 | /* This is a global so we don't have to pass this through every function. |
84 | It is used in new_elt_loc_list to set SETTING_INSN. */ |
85 | static rtx_insn *cselib_current_insn; |
86 | |
87 | /* There are three ways in which cselib can look up an rtx: |
88 | - for a REG, the reg_values table (which is indexed by regno) is used |
89 | - for a MEM, we recursively look up its address and then follow the |
90 | addr_list of that value |
91 | - for everything else, we compute a hash value and go through the hash |
92 | table. Since different rtx's can still have the same hash value, |
93 | this involves walking the table entries for a given value and comparing |
94 | the locations of the entries with the rtx we are looking up. */ |
95 | |
96 | struct cselib_hasher : nofree_ptr_hash <cselib_val> |
97 | { |
98 | struct key { |
99 | /* The rtx value and its mode (needed separately for constant |
100 | integers). */ |
101 | machine_mode mode; |
102 | rtx x; |
103 | /* The mode of the contaning MEM, if any, otherwise VOIDmode. */ |
104 | machine_mode memmode; |
105 | }; |
106 | typedef key *compare_type; |
107 | static inline hashval_t hash (const cselib_val *); |
108 | static inline bool equal (const cselib_val *, const key *); |
109 | }; |
110 | |
111 | /* The hash function for our hash table. The value is always computed with |
112 | cselib_hash_rtx when adding an element; this function just extracts the |
113 | hash value from a cselib_val structure. */ |
114 | |
115 | inline hashval_t |
116 | cselib_hasher::hash (const cselib_val *v) |
117 | { |
118 | return v->hash; |
119 | } |
120 | |
121 | /* The equality test for our hash table. The first argument V is a table |
122 | element (i.e. a cselib_val), while the second arg X is an rtx. We know |
123 | that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a |
124 | CONST of an appropriate mode. */ |
125 | |
126 | inline bool |
127 | cselib_hasher::equal (const cselib_val *v, const key *x_arg) |
128 | { |
129 | struct elt_loc_list *l; |
130 | rtx x = x_arg->x; |
131 | machine_mode mode = x_arg->mode; |
132 | machine_mode memmode = x_arg->memmode; |
133 | |
134 | if (mode != GET_MODE (v->val_rtx)((machine_mode) (v->val_rtx)->mode)) |
135 | return false; |
136 | |
137 | if (GET_CODE (x)((enum rtx_code) (x)->code) == VALUE) |
138 | return x == v->val_rtx; |
139 | |
140 | if (SP_DERIVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 140, __FUNCTION__); _rtx; })->call) && GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode)))) |
141 | { |
142 | rtx xoff = NULLnullptr; |
143 | if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX(rtx) 0) |
144 | return true; |
145 | } |
146 | |
147 | /* We don't guarantee that distinct rtx's have different hash values, |
148 | so we need to do a comparison. */ |
149 | for (l = v->locs; l; l = l->next) |
150 | if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)(((enum rtx_code) (l->setting_insn)->code) == DEBUG_INSN ) |
151 | && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)(((enum rtx_code) (cselib_current_insn)->code) == DEBUG_INSN ))) |
152 | { |
153 | rtx_insn *save_cselib_current_insn = cselib_current_insn; |
154 | /* If l is so far a debug only loc, without debug stmts it |
155 | would never be compared to x at all, so temporarily pretend |
156 | current instruction is that DEBUG_INSN so that we don't |
157 | promote other debug locs even for unsuccessful comparison. */ |
158 | cselib_current_insn = l->setting_insn; |
159 | bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0); |
160 | cselib_current_insn = save_cselib_current_insn; |
161 | if (match) |
162 | { |
163 | promote_debug_loc (l); |
164 | return true; |
165 | } |
166 | } |
167 | else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0)) |
168 | return true; |
169 | |
170 | return false; |
171 | } |
172 | |
173 | /* A table that enables us to look up elts by their value. */ |
174 | static hash_table<cselib_hasher> *cselib_hash_table; |
175 | |
176 | /* A table to hold preserved values. */ |
177 | static hash_table<cselib_hasher> *cselib_preserved_hash_table; |
178 | |
179 | /* The unique id that the next create value will take. */ |
180 | static unsigned int next_uid; |
181 | |
182 | /* The number of registers we had when the varrays were last resized. */ |
183 | static unsigned int cselib_nregs; |
184 | |
185 | /* Count values without known locations, or with only locations that |
186 | wouldn't have been known except for debug insns. Whenever this |
187 | grows too big, we remove these useless values from the table. |
188 | |
189 | Counting values with only debug values is a bit tricky. We don't |
190 | want to increment n_useless_values when we create a value for a |
191 | debug insn, for this would get n_useless_values out of sync, but we |
192 | want increment it if all locs in the list that were ever referenced |
193 | in nondebug insns are removed from the list. |
194 | |
195 | In the general case, once we do that, we'd have to stop accepting |
196 | nondebug expressions in the loc list, to avoid having two values |
197 | equivalent that, without debug insns, would have been made into |
198 | separate values. However, because debug insns never introduce |
199 | equivalences themselves (no assignments), the only means for |
200 | growing loc lists is through nondebug assignments. If the locs |
201 | also happen to be referenced in debug insns, it will work just fine. |
202 | |
203 | A consequence of this is that there's at most one debug-only loc in |
204 | each loc list. If we keep it in the first entry, testing whether |
205 | we have a debug-only loc list takes O(1). |
206 | |
207 | Furthermore, since any additional entry in a loc list containing a |
208 | debug loc would have to come from an assignment (nondebug) that |
209 | references both the initial debug loc and the newly-equivalent loc, |
210 | the initial debug loc would be promoted to a nondebug loc, and the |
211 | loc list would not contain debug locs any more. |
212 | |
213 | So the only case we have to be careful with in order to keep |
214 | n_useless_values in sync between debug and nondebug compilations is |
215 | to avoid incrementing n_useless_values when removing the single loc |
216 | from a value that turns out to not appear outside debug values. We |
217 | increment n_useless_debug_values instead, and leave such values |
218 | alone until, for other reasons, we garbage-collect useless |
219 | values. */ |
220 | static int n_useless_values; |
221 | static int n_useless_debug_values; |
222 | |
223 | /* Count values whose locs have been taken exclusively from debug |
224 | insns for the entire life of the value. */ |
225 | static int n_debug_values; |
226 | |
227 | /* Number of useless values before we remove them from the hash table. */ |
228 | #define MAX_USELESS_VALUES32 32 |
229 | |
230 | /* This table maps from register number to values. It does not |
231 | contain pointers to cselib_val structures, but rather elt_lists. |
232 | The purpose is to be able to refer to the same register in |
233 | different modes. The first element of the list defines the mode in |
234 | which the register was set; if the mode is unknown or the value is |
235 | no longer valid in that mode, ELT will be NULL for the first |
236 | element. */ |
237 | static struct elt_list **reg_values; |
238 | static unsigned int reg_values_size; |
239 | #define REG_VALUES(i)reg_values[i] reg_values[i] |
240 | |
241 | /* The largest number of hard regs used by any entry added to the |
242 | REG_VALUES table. Cleared on each cselib_clear_table() invocation. */ |
243 | static unsigned int max_value_regs; |
244 | |
245 | /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used |
246 | in cselib_clear_table() for fast emptying. */ |
247 | static unsigned int *used_regs; |
248 | static unsigned int n_used_regs; |
249 | |
250 | /* We pass this to cselib_invalidate_mem to invalidate all of |
251 | memory for a non-const call instruction. */ |
252 | static GTY(()) rtx callmem; |
253 | |
254 | /* Set by discard_useless_locs if it deleted the last location of any |
255 | value. */ |
256 | static int values_became_useless; |
257 | |
258 | /* Used as stop element of the containing_mem list so we can check |
259 | presence in the list by checking the next pointer. */ |
260 | static cselib_val dummy_val; |
261 | |
262 | /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx |
263 | that is constant through the whole function and should never be |
264 | eliminated. */ |
265 | static cselib_val *cfa_base_preserved_val; |
266 | static unsigned int cfa_base_preserved_regno = INVALID_REGNUM(~(unsigned int) 0); |
267 | |
268 | /* Used to list all values that contain memory reference. |
269 | May or may not contain the useless values - the list is compacted |
270 | each time memory is invalidated. */ |
271 | static cselib_val *first_containing_mem = &dummy_val; |
272 | |
273 | static object_allocator<elt_list> elt_list_pool ("elt_list"); |
274 | static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list"); |
275 | static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list"); |
276 | |
277 | static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE)rtx_code_size[VALUE]); |
278 | |
279 | /* If nonnull, cselib will call this function before freeing useless |
280 | VALUEs. A VALUE is deemed useless if its "locs" field is null. */ |
281 | void (*cselib_discard_hook) (cselib_val *); |
282 | |
283 | /* If nonnull, cselib will call this function before recording sets or |
284 | even clobbering outputs of INSN. All the recorded sets will be |
285 | represented in the array sets[n_sets]. new_val_min can be used to |
286 | tell whether values present in sets are introduced by this |
287 | instruction. */ |
288 | void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets, |
289 | int n_sets); |
290 | |
291 | |
292 | |
293 | /* Allocate a struct elt_list and fill in its two elements with the |
294 | arguments. */ |
295 | |
296 | static inline struct elt_list * |
297 | new_elt_list (struct elt_list *next, cselib_val *elt) |
298 | { |
299 | elt_list *el = elt_list_pool.allocate (); |
300 | el->next = next; |
301 | el->elt = elt; |
302 | return el; |
303 | } |
304 | |
305 | /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc |
306 | list. */ |
307 | |
308 | static inline void |
309 | new_elt_loc_list (cselib_val *val, rtx loc) |
310 | { |
311 | struct elt_loc_list *el, *next = val->locs; |
312 | |
313 | gcc_checking_assert (!next || !next->setting_insn((void)(!(!next || !next->setting_insn || !(((enum rtx_code ) (next->setting_insn)->code) == DEBUG_INSN) || cselib_current_insn == next->setting_insn) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 315, __FUNCTION__), 0 : 0)) |
314 | || !DEBUG_INSN_P (next->setting_insn)((void)(!(!next || !next->setting_insn || !(((enum rtx_code ) (next->setting_insn)->code) == DEBUG_INSN) || cselib_current_insn == next->setting_insn) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 315, __FUNCTION__), 0 : 0)) |
315 | || cselib_current_insn == next->setting_insn)((void)(!(!next || !next->setting_insn || !(((enum rtx_code ) (next->setting_insn)->code) == DEBUG_INSN) || cselib_current_insn == next->setting_insn) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 315, __FUNCTION__), 0 : 0)); |
316 | |
317 | /* If we're creating the first loc in a debug insn context, we've |
318 | just created a debug value. Count it. */ |
319 | if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn)(((enum rtx_code) (cselib_current_insn)->code) == DEBUG_INSN )) |
320 | n_debug_values++; |
321 | |
322 | val = canonical_cselib_val (val); |
323 | next = val->locs; |
324 | |
325 | if (GET_CODE (loc)((enum rtx_code) (loc)->code) == VALUE) |
326 | { |
327 | loc = canonical_cselib_val (CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib))->val_rtx; |
328 | |
329 | gcc_checking_assert (PRESERVED_VALUE_P (loc)((void)(!((__extension__ ({ __typeof ((loc)) const _rtx = ((loc )); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 329, __FUNCTION__); _rtx; })->unchanging) == (__extension__ ({ __typeof ((val->val_rtx)) const _rtx = ((val->val_rtx )); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 330, __FUNCTION__); _rtx; })->unchanging)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 330, __FUNCTION__), 0 : 0)) |
330 | == PRESERVED_VALUE_P (val->val_rtx))((void)(!((__extension__ ({ __typeof ((loc)) const _rtx = ((loc )); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 329, __FUNCTION__); _rtx; })->unchanging) == (__extension__ ({ __typeof ((val->val_rtx)) const _rtx = ((val->val_rtx )); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 330, __FUNCTION__); _rtx; })->unchanging)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 330, __FUNCTION__), 0 : 0)); |
331 | |
332 | if (val->val_rtx == loc) |
333 | return; |
334 | else if (val->uid > CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->uid) |
335 | { |
336 | /* Reverse the insertion. */ |
337 | new_elt_loc_list (CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib), val->val_rtx); |
338 | return; |
339 | } |
340 | |
341 | gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid)((void)(!(val->uid < (((loc)->u.fld[0]).rt_cselib)-> uid) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 341, __FUNCTION__), 0 : 0)); |
342 | |
343 | if (CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->locs) |
344 | { |
345 | /* Bring all locs from LOC to VAL. */ |
346 | for (el = CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->locs; el->next; el = el->next) |
347 | { |
348 | /* Adjust values that have LOC as canonical so that VAL |
349 | becomes their canonical. */ |
350 | if (el->loc && GET_CODE (el->loc)((enum rtx_code) (el->loc)->code) == VALUE) |
351 | { |
352 | gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc((void)(!((((el->loc)->u.fld[0]).rt_cselib)->locs-> loc == loc) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 353, __FUNCTION__), 0 : 0)) |
353 | == loc)((void)(!((((el->loc)->u.fld[0]).rt_cselib)->locs-> loc == loc) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 353, __FUNCTION__), 0 : 0)); |
354 | CSELIB_VAL_PTR (el->loc)(((el->loc)->u.fld[0]).rt_cselib)->locs->loc = val->val_rtx; |
355 | } |
356 | } |
357 | el->next = val->locs; |
358 | next = val->locs = CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->locs; |
359 | } |
360 | |
361 | if (CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->addr_list) |
362 | { |
363 | /* Bring in addr_list into canonical node. */ |
364 | struct elt_list *last = CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->addr_list; |
365 | while (last->next) |
366 | last = last->next; |
367 | last->next = val->addr_list; |
368 | val->addr_list = CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->addr_list; |
369 | CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->addr_list = NULLnullptr; |
370 | } |
371 | |
372 | if (CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->next_containing_mem != NULLnullptr |
373 | && val->next_containing_mem == NULLnullptr) |
374 | { |
375 | /* Add VAL to the containing_mem list after LOC. LOC will |
376 | be removed when we notice it doesn't contain any |
377 | MEMs. */ |
378 | val->next_containing_mem = CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->next_containing_mem; |
379 | CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->next_containing_mem = val; |
380 | } |
381 | |
382 | /* Chain LOC back to VAL. */ |
383 | el = elt_loc_list_pool.allocate (); |
384 | el->loc = val->val_rtx; |
385 | el->setting_insn = cselib_current_insn; |
386 | el->next = NULLnullptr; |
387 | CSELIB_VAL_PTR (loc)(((loc)->u.fld[0]).rt_cselib)->locs = el; |
388 | } |
389 | |
390 | el = elt_loc_list_pool.allocate (); |
391 | el->loc = loc; |
392 | el->setting_insn = cselib_current_insn; |
393 | el->next = next; |
394 | val->locs = el; |
395 | } |
396 | |
397 | /* Promote loc L to a nondebug cselib_current_insn if L is marked as |
398 | originating from a debug insn, maintaining the debug values |
399 | count. */ |
400 | |
401 | static inline void |
402 | promote_debug_loc (struct elt_loc_list *l) |
403 | { |
404 | if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)(((enum rtx_code) (l->setting_insn)->code) == DEBUG_INSN ) |
405 | && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)(((enum rtx_code) (cselib_current_insn)->code) == DEBUG_INSN ))) |
406 | { |
407 | n_debug_values--; |
408 | l->setting_insn = cselib_current_insn; |
409 | if (cselib_preserve_constants && l->next) |
410 | { |
411 | gcc_assert (l->next->setting_insn((void)(!(l->next->setting_insn && (((enum rtx_code ) (l->next->setting_insn)->code) == DEBUG_INSN) && !l->next->next) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 413, __FUNCTION__), 0 : 0)) |
412 | && DEBUG_INSN_P (l->next->setting_insn)((void)(!(l->next->setting_insn && (((enum rtx_code ) (l->next->setting_insn)->code) == DEBUG_INSN) && !l->next->next) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 413, __FUNCTION__), 0 : 0)) |
413 | && !l->next->next)((void)(!(l->next->setting_insn && (((enum rtx_code ) (l->next->setting_insn)->code) == DEBUG_INSN) && !l->next->next) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 413, __FUNCTION__), 0 : 0)); |
414 | l->next->setting_insn = cselib_current_insn; |
415 | } |
416 | else |
417 | gcc_assert (!l->next)((void)(!(!l->next) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 417, __FUNCTION__), 0 : 0)); |
418 | } |
419 | } |
420 | |
421 | /* The elt_list at *PL is no longer needed. Unchain it and free its |
422 | storage. */ |
423 | |
424 | static inline void |
425 | unchain_one_elt_list (struct elt_list **pl) |
426 | { |
427 | struct elt_list *l = *pl; |
428 | |
429 | *pl = l->next; |
430 | elt_list_pool.remove (l); |
431 | } |
432 | |
433 | /* Likewise for elt_loc_lists. */ |
434 | |
435 | static void |
436 | unchain_one_elt_loc_list (struct elt_loc_list **pl) |
437 | { |
438 | struct elt_loc_list *l = *pl; |
439 | |
440 | *pl = l->next; |
441 | elt_loc_list_pool.remove (l); |
442 | } |
443 | |
444 | /* Likewise for cselib_vals. This also frees the addr_list associated with |
445 | V. */ |
446 | |
447 | static void |
448 | unchain_one_value (cselib_val *v) |
449 | { |
450 | while (v->addr_list) |
451 | unchain_one_elt_list (&v->addr_list); |
452 | |
453 | cselib_val_pool.remove (v); |
454 | } |
455 | |
456 | /* Remove all entries from the hash table. Also used during |
457 | initialization. */ |
458 | |
459 | void |
460 | cselib_clear_table (void) |
461 | { |
462 | cselib_reset_table (1); |
463 | } |
464 | |
465 | /* Return TRUE if V is a constant, a function invariant or a VALUE |
466 | equivalence; FALSE otherwise. */ |
467 | |
468 | static bool |
469 | invariant_or_equiv_p (cselib_val *v) |
470 | { |
471 | struct elt_loc_list *l; |
472 | |
473 | if (v == cfa_base_preserved_val) |
474 | return true; |
475 | |
476 | /* Keep VALUE equivalences around. */ |
477 | for (l = v->locs; l; l = l->next) |
478 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == VALUE) |
479 | return true; |
480 | |
481 | if (v->locs != NULLnullptr |
482 | && v->locs->next == NULLnullptr) |
483 | { |
484 | if (CONSTANT_P (v->locs->loc)((rtx_class[(int) (((enum rtx_code) (v->locs->loc)-> code))]) == RTX_CONST_OBJ) |
485 | && (GET_CODE (v->locs->loc)((enum rtx_code) (v->locs->loc)->code) != CONST |
486 | || !references_value_p (v->locs->loc, 0))) |
487 | return true; |
488 | /* Although a debug expr may be bound to different expressions, |
489 | we can preserve it as if it was constant, to get unification |
490 | and proper merging within var-tracking. */ |
491 | if (GET_CODE (v->locs->loc)((enum rtx_code) (v->locs->loc)->code) == DEBUG_EXPR |
492 | || GET_CODE (v->locs->loc)((enum rtx_code) (v->locs->loc)->code) == DEBUG_IMPLICIT_PTR |
493 | || GET_CODE (v->locs->loc)((enum rtx_code) (v->locs->loc)->code) == ENTRY_VALUE |
494 | || GET_CODE (v->locs->loc)((enum rtx_code) (v->locs->loc)->code) == DEBUG_PARAMETER_REF) |
495 | return true; |
496 | |
497 | /* (plus (value V) (const_int C)) is invariant iff V is invariant. */ |
498 | if (GET_CODE (v->locs->loc)((enum rtx_code) (v->locs->loc)->code) == PLUS |
499 | && CONST_INT_P (XEXP (v->locs->loc, 1))(((enum rtx_code) ((((v->locs->loc)->u.fld[1]).rt_rtx ))->code) == CONST_INT) |
500 | && GET_CODE (XEXP (v->locs->loc, 0))((enum rtx_code) ((((v->locs->loc)->u.fld[0]).rt_rtx ))->code) == VALUE |
501 | && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))((((((v->locs->loc)->u.fld[0]).rt_rtx))->u.fld[0] ).rt_cselib))) |
502 | return true; |
503 | } |
504 | |
505 | return false; |
506 | } |
507 | |
508 | /* Remove from hash table all VALUEs except constants, function |
509 | invariants and VALUE equivalences. */ |
510 | |
511 | int |
512 | preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED__attribute__ ((__unused__))) |
513 | { |
514 | cselib_val *v = *x; |
515 | |
516 | if (invariant_or_equiv_p (v)) |
517 | { |
518 | cselib_hasher::key lookup = { |
519 | GET_MODE (v->val_rtx)((machine_mode) (v->val_rtx)->mode), v->val_rtx, VOIDmode((void) 0, E_VOIDmode) |
520 | }; |
521 | cselib_val **slot |
522 | = cselib_preserved_hash_table->find_slot_with_hash (&lookup, |
523 | v->hash, INSERT); |
524 | gcc_assert (!*slot)((void)(!(!*slot) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 524, __FUNCTION__), 0 : 0)); |
525 | *slot = v; |
526 | } |
527 | |
528 | cselib_hash_table->clear_slot (x); |
529 | |
530 | return 1; |
531 | } |
532 | |
533 | /* Remove all entries from the hash table, arranging for the next |
534 | value to be numbered NUM. */ |
535 | |
536 | void |
537 | cselib_reset_table (unsigned int num) |
538 | { |
539 | unsigned int i; |
540 | |
541 | max_value_regs = 0; |
542 | |
543 | if (cfa_base_preserved_val) |
544 | { |
545 | unsigned int regno = cfa_base_preserved_regno; |
546 | unsigned int new_used_regs = 0; |
547 | for (i = 0; i < n_used_regs; i++) |
548 | if (used_regs[i] == regno) |
549 | { |
550 | new_used_regs = 1; |
551 | continue; |
552 | } |
553 | else |
554 | REG_VALUES (used_regs[i])reg_values[used_regs[i]] = 0; |
555 | gcc_assert (new_used_regs == 1)((void)(!(new_used_regs == 1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 555, __FUNCTION__), 0 : 0)); |
556 | n_used_regs = new_used_regs; |
557 | used_regs[0] = regno; |
558 | max_value_regs |
559 | = hard_regno_nregs (regno, |
560 | GET_MODE (cfa_base_preserved_val->locs->loc)((machine_mode) (cfa_base_preserved_val->locs->loc)-> mode)); |
561 | |
562 | /* If cfa_base is sp + const_int, need to preserve also the |
563 | SP_DERIVED_VALUE_P value. */ |
564 | for (struct elt_loc_list *l = cfa_base_preserved_val->locs; |
565 | l; l = l->next) |
566 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
567 | && GET_CODE (XEXP (l->loc, 0))((enum rtx_code) ((((l->loc)->u.fld[0]).rt_rtx))->code ) == VALUE |
568 | && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))(__extension__ ({ __typeof (((((l->loc)->u.fld[0]).rt_rtx ))) const _rtx = (((((l->loc)->u.fld[0]).rt_rtx))); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 568, __FUNCTION__); _rtx; })->call) |
569 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
570 | { |
571 | if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))((((((l->loc)->u.fld[0]).rt_rtx))->u.fld[0]).rt_cselib ))) |
572 | { |
573 | rtx val = cfa_base_preserved_val->val_rtx; |
574 | rtx_insn *save_cselib_current_insn = cselib_current_insn; |
575 | cselib_current_insn = l->setting_insn; |
576 | new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0))((((((l->loc)->u.fld[0]).rt_rtx))->u.fld[0]).rt_cselib ), |
577 | plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), val, |
578 | -UINTVAL (XEXP (l->loc, 1))((unsigned long) (((((l->loc)->u.fld[1]).rt_rtx))->u .hwint[0])))); |
579 | cselib_current_insn = save_cselib_current_insn; |
580 | } |
581 | break; |
582 | } |
583 | } |
584 | else |
585 | { |
586 | for (i = 0; i < n_used_regs; i++) |
587 | REG_VALUES (used_regs[i])reg_values[used_regs[i]] = 0; |
588 | n_used_regs = 0; |
589 | } |
590 | |
591 | if (cselib_preserve_constants) |
592 | cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULLnullptr); |
593 | else |
594 | { |
595 | cselib_hash_table->empty (); |
596 | gcc_checking_assert (!cselib_any_perm_equivs)((void)(!(!cselib_any_perm_equivs) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 596, __FUNCTION__), 0 : 0)); |
597 | } |
598 | |
599 | n_useless_values = 0; |
600 | n_useless_debug_values = 0; |
601 | n_debug_values = 0; |
602 | |
603 | next_uid = num; |
604 | |
605 | first_containing_mem = &dummy_val; |
606 | } |
607 | |
608 | /* Return the number of the next value that will be generated. */ |
609 | |
610 | unsigned int |
611 | cselib_get_next_uid (void) |
612 | { |
613 | return next_uid; |
614 | } |
615 | |
616 | /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE, |
617 | INSERTing if requested. When X is part of the address of a MEM, |
618 | MEMMODE should specify the mode of the MEM. */ |
619 | |
620 | static cselib_val ** |
621 | cselib_find_slot (machine_mode mode, rtx x, hashval_t hash, |
622 | enum insert_option insert, machine_mode memmode) |
623 | { |
624 | cselib_val **slot = NULLnullptr; |
625 | cselib_hasher::key lookup = { mode, x, memmode }; |
626 | if (cselib_preserve_constants) |
627 | slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash, |
628 | NO_INSERT); |
629 | if (!slot) |
630 | slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert); |
631 | return slot; |
632 | } |
633 | |
634 | /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we |
635 | only return true for values which point to a cselib_val whose value |
636 | element has been set to zero, which implies the cselib_val will be |
637 | removed. */ |
638 | |
639 | int |
640 | references_value_p (const_rtx x, int only_useless) |
641 | { |
642 | const enum rtx_code code = GET_CODE (x)((enum rtx_code) (x)->code); |
643 | const char *fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]); |
644 | int i, j; |
645 | |
646 | if (GET_CODE (x)((enum rtx_code) (x)->code) == VALUE |
647 | && (! only_useless |
648 | || (CSELIB_VAL_PTR (x)(((x)->u.fld[0]).rt_cselib)->locs == 0 && !PRESERVED_VALUE_P (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ( "PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 648, __FUNCTION__); _rtx; })->unchanging)))) |
649 | return 1; |
650 | |
651 | for (i = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]) - 1; i >= 0; i--) |
652 | { |
653 | if (fmt[i] == 'e' && references_value_p (XEXP (x, i)(((x)->u.fld[i]).rt_rtx), only_useless)) |
654 | return 1; |
655 | else if (fmt[i] == 'E') |
656 | for (j = 0; j < XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem); j++) |
657 | if (references_value_p (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]), only_useless)) |
658 | return 1; |
659 | } |
660 | |
661 | return 0; |
662 | } |
663 | |
664 | /* Return true if V is a useless VALUE and can be discarded as such. */ |
665 | |
666 | static bool |
667 | cselib_useless_value_p (cselib_val *v) |
668 | { |
669 | return (v->locs == 0 |
670 | && !PRESERVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 670, __FUNCTION__); _rtx; })->unchanging) |
671 | && !SP_DERIVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 671, __FUNCTION__); _rtx; })->call)); |
672 | } |
673 | |
674 | /* For all locations found in X, delete locations that reference useless |
675 | values (i.e. values without any location). Called through |
676 | htab_traverse. */ |
677 | |
678 | int |
679 | discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED__attribute__ ((__unused__))) |
680 | { |
681 | cselib_val *v = *x; |
682 | struct elt_loc_list **p = &v->locs; |
683 | bool had_locs = v->locs != NULLnullptr; |
684 | rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULLnullptr; |
685 | |
686 | while (*p) |
687 | { |
688 | if (references_value_p ((*p)->loc, 1)) |
689 | unchain_one_elt_loc_list (p); |
690 | else |
691 | p = &(*p)->next; |
692 | } |
693 | |
694 | if (had_locs && cselib_useless_value_p (v)) |
695 | { |
696 | if (setting_insn && DEBUG_INSN_P (setting_insn)(((enum rtx_code) (setting_insn)->code) == DEBUG_INSN)) |
697 | n_useless_debug_values++; |
698 | else |
699 | n_useless_values++; |
700 | values_became_useless = 1; |
701 | } |
702 | return 1; |
703 | } |
704 | |
705 | /* If X is a value with no locations, remove it from the hashtable. */ |
706 | |
707 | int |
708 | discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED__attribute__ ((__unused__))) |
709 | { |
710 | cselib_val *v = *x; |
711 | |
712 | if (v->locs == 0 && cselib_useless_value_p (v)) |
713 | { |
714 | if (cselib_discard_hook) |
715 | cselib_discard_hook (v); |
716 | |
717 | CSELIB_VAL_PTR (v->val_rtx)(((v->val_rtx)->u.fld[0]).rt_cselib) = NULLnullptr; |
718 | cselib_hash_table->clear_slot (x); |
719 | unchain_one_value (v); |
720 | n_useless_values--; |
721 | } |
722 | |
723 | return 1; |
724 | } |
725 | |
726 | /* Clean out useless values (i.e. those which no longer have locations |
727 | associated with them) from the hash table. */ |
728 | |
729 | static void |
730 | remove_useless_values (void) |
731 | { |
732 | cselib_val **p, *v; |
733 | |
734 | /* First pass: eliminate locations that reference the value. That in |
735 | turn can make more values useless. */ |
736 | do |
737 | { |
738 | values_became_useless = 0; |
739 | cselib_hash_table->traverse <void *, discard_useless_locs> (NULLnullptr); |
740 | } |
741 | while (values_became_useless); |
742 | |
743 | /* Second pass: actually remove the values. */ |
744 | |
745 | p = &first_containing_mem; |
746 | for (v = *p; v != &dummy_val; v = v->next_containing_mem) |
747 | if (v->locs && v == canonical_cselib_val (v)) |
748 | { |
749 | *p = v; |
750 | p = &(*p)->next_containing_mem; |
751 | } |
752 | *p = &dummy_val; |
753 | |
754 | n_useless_values += n_useless_debug_values; |
755 | n_debug_values -= n_useless_debug_values; |
756 | n_useless_debug_values = 0; |
757 | |
758 | cselib_hash_table->traverse <void *, discard_useless_values> (NULLnullptr); |
759 | |
760 | gcc_assert (!n_useless_values)((void)(!(!n_useless_values) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 760, __FUNCTION__), 0 : 0)); |
761 | } |
762 | |
763 | /* Arrange for a value to not be removed from the hash table even if |
764 | it becomes useless. */ |
765 | |
766 | void |
767 | cselib_preserve_value (cselib_val *v) |
768 | { |
769 | PRESERVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 769, __FUNCTION__); _rtx; })->unchanging) = 1; |
770 | } |
771 | |
772 | /* Test whether a value is preserved. */ |
773 | |
774 | bool |
775 | cselib_preserved_value_p (cselib_val *v) |
776 | { |
777 | return PRESERVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 777, __FUNCTION__); _rtx; })->unchanging); |
778 | } |
779 | |
780 | /* Arrange for a REG value to be assumed constant through the whole function, |
781 | never invalidated and preserved across cselib_reset_table calls. */ |
782 | |
783 | void |
784 | cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno) |
785 | { |
786 | if (cselib_preserve_constants |
787 | && v->locs |
788 | && REG_P (v->locs->loc)(((enum rtx_code) (v->locs->loc)->code) == REG)) |
789 | { |
790 | cfa_base_preserved_val = v; |
791 | cfa_base_preserved_regno = regno; |
792 | } |
793 | } |
794 | |
795 | /* Clean all non-constant expressions in the hash table, but retain |
796 | their values. */ |
797 | |
798 | void |
799 | cselib_preserve_only_values (void) |
800 | { |
801 | int i; |
802 | |
803 | for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) |
804 | cselib_invalidate_regno (i, reg_raw_mode(this_target_regs->x_reg_raw_mode)[i]); |
805 | |
806 | cselib_invalidate_mem (callmem); |
807 | |
808 | remove_useless_values (); |
809 | |
810 | gcc_assert (first_containing_mem == &dummy_val)((void)(!(first_containing_mem == &dummy_val) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 810, __FUNCTION__), 0 : 0)); |
811 | } |
812 | |
813 | /* Arrange for a value to be marked as based on stack pointer |
814 | for find_base_term purposes. */ |
815 | |
816 | void |
817 | cselib_set_value_sp_based (cselib_val *v) |
818 | { |
819 | SP_BASED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_BASED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 819, __FUNCTION__); _rtx; })->jump) = 1; |
820 | } |
821 | |
822 | /* Test whether a value is based on stack pointer for |
823 | find_base_term purposes. */ |
824 | |
825 | bool |
826 | cselib_sp_based_value_p (cselib_val *v) |
827 | { |
828 | return SP_BASED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_BASED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 828, __FUNCTION__); _rtx; })->jump); |
829 | } |
830 | |
831 | /* Return the mode in which a register was last set. If X is not a |
832 | register, return its mode. If the mode in which the register was |
833 | set is not known, or the value was already clobbered, return |
834 | VOIDmode. */ |
835 | |
836 | machine_mode |
837 | cselib_reg_set_mode (const_rtx x) |
838 | { |
839 | if (!REG_P (x)(((enum rtx_code) (x)->code) == REG)) |
840 | return GET_MODE (x)((machine_mode) (x)->mode); |
841 | |
842 | if (REG_VALUES (REGNO (x))reg_values[(rhs_regno(x))] == NULLnullptr |
843 | || REG_VALUES (REGNO (x))reg_values[(rhs_regno(x))]->elt == NULLnullptr) |
844 | return VOIDmode((void) 0, E_VOIDmode); |
845 | |
846 | return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx)((machine_mode) (reg_values[(rhs_regno(x))]->elt->val_rtx )->mode); |
847 | } |
848 | |
849 | /* If x is a PLUS or an autoinc operation, expand the operation, |
850 | storing the offset, if any, in *OFF. */ |
851 | |
852 | static rtx |
853 | autoinc_split (rtx x, rtx *off, machine_mode memmode) |
854 | { |
855 | switch (GET_CODE (x)((enum rtx_code) (x)->code)) |
856 | { |
857 | case PLUS: |
858 | *off = XEXP (x, 1)(((x)->u.fld[1]).rt_rtx); |
859 | x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx); |
860 | break; |
861 | |
862 | case PRE_DEC: |
863 | if (memmode == VOIDmode((void) 0, E_VOIDmode)) |
864 | return x; |
865 | |
866 | *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x)((machine_mode) (x)->mode)); |
867 | x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx); |
868 | break; |
869 | |
870 | case PRE_INC: |
871 | if (memmode == VOIDmode((void) 0, E_VOIDmode)) |
872 | return x; |
873 | |
874 | *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x)((machine_mode) (x)->mode)); |
875 | x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx); |
876 | break; |
877 | |
878 | case PRE_MODIFY: |
879 | x = XEXP (x, 1)(((x)->u.fld[1]).rt_rtx); |
880 | break; |
881 | |
882 | case POST_DEC: |
883 | case POST_INC: |
884 | case POST_MODIFY: |
885 | x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx); |
886 | break; |
887 | |
888 | default: |
889 | break; |
890 | } |
891 | |
892 | if (GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))) |
893 | && (REG_P (x)(((enum rtx_code) (x)->code) == REG) || MEM_P (x)(((enum rtx_code) (x)->code) == MEM) || GET_CODE (x)((enum rtx_code) (x)->code) == VALUE) |
894 | && (*off == NULL_RTX(rtx) 0 || CONST_INT_P (*off)(((enum rtx_code) (*off)->code) == CONST_INT))) |
895 | { |
896 | cselib_val *e; |
897 | if (GET_CODE (x)((enum rtx_code) (x)->code) == VALUE) |
898 | e = CSELIB_VAL_PTR (x)(((x)->u.fld[0]).rt_cselib); |
899 | else |
900 | e = cselib_lookup (x, GET_MODE (x)((machine_mode) (x)->mode), 0, memmode); |
901 | if (e) |
902 | { |
903 | if (SP_DERIVED_VALUE_P (e->val_rtx)(__extension__ ({ __typeof ((e->val_rtx)) const _rtx = ((e ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 903, __FUNCTION__); _rtx; })->call) |
904 | && (*off == NULL_RTX(rtx) 0 || *off == const0_rtx(const_int_rtx[64]))) |
905 | { |
906 | *off = NULL_RTX(rtx) 0; |
907 | return e->val_rtx; |
908 | } |
909 | for (struct elt_loc_list *l = e->locs; l; l = l->next) |
910 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
911 | && GET_CODE (XEXP (l->loc, 0))((enum rtx_code) ((((l->loc)->u.fld[0]).rt_rtx))->code ) == VALUE |
912 | && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))(__extension__ ({ __typeof (((((l->loc)->u.fld[0]).rt_rtx ))) const _rtx = (((((l->loc)->u.fld[0]).rt_rtx))); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 912, __FUNCTION__); _rtx; })->call) |
913 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
914 | { |
915 | if (*off == NULL_RTX(rtx) 0) |
916 | *off = XEXP (l->loc, 1)(((l->loc)->u.fld[1]).rt_rtx); |
917 | else |
918 | *off = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), *off, |
919 | INTVAL (XEXP (l->loc, 1))(((((l->loc)->u.fld[1]).rt_rtx))->u.hwint[0])); |
920 | if (*off == const0_rtx(const_int_rtx[64])) |
921 | *off = NULL_RTX(rtx) 0; |
922 | return XEXP (l->loc, 0)(((l->loc)->u.fld[0]).rt_rtx); |
923 | } |
924 | } |
925 | } |
926 | return x; |
927 | } |
928 | |
929 | /* Return nonzero if we can prove that X and Y contain the same value, |
930 | taking our gathered information into account. MEMMODE holds the |
931 | mode of the enclosing MEM, if any, as required to deal with autoinc |
932 | addressing modes. If X and Y are not (known to be) part of |
933 | addresses, MEMMODE should be VOIDmode. */ |
934 | |
935 | int |
936 | rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth) |
937 | { |
938 | enum rtx_code code; |
939 | const char *fmt; |
940 | int i; |
941 | |
942 | if (REG_P (x)(((enum rtx_code) (x)->code) == REG) || MEM_P (x)(((enum rtx_code) (x)->code) == MEM)) |
943 | { |
944 | cselib_val *e = cselib_lookup (x, GET_MODE (x)((machine_mode) (x)->mode), 0, memmode); |
945 | |
946 | if (e) |
947 | x = e->val_rtx; |
948 | } |
949 | |
950 | if (REG_P (y)(((enum rtx_code) (y)->code) == REG) || MEM_P (y)(((enum rtx_code) (y)->code) == MEM)) |
951 | { |
952 | cselib_val *e = cselib_lookup (y, GET_MODE (y)((machine_mode) (y)->mode), 0, memmode); |
953 | |
954 | if (e) |
955 | y = e->val_rtx; |
956 | } |
957 | |
958 | if (x == y) |
959 | return 1; |
960 | |
961 | if (GET_CODE (x)((enum rtx_code) (x)->code) == VALUE) |
962 | { |
963 | cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x)(((x)->u.fld[0]).rt_cselib)); |
964 | struct elt_loc_list *l; |
965 | |
966 | if (GET_CODE (y)((enum rtx_code) (y)->code) == VALUE) |
967 | return e == canonical_cselib_val (CSELIB_VAL_PTR (y)(((y)->u.fld[0]).rt_cselib)); |
968 | |
969 | if ((SP_DERIVED_VALUE_P (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ( "SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 969, __FUNCTION__); _rtx; })->call) |
970 | || SP_DERIVED_VALUE_P (e->val_rtx)(__extension__ ({ __typeof ((e->val_rtx)) const _rtx = ((e ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 970, __FUNCTION__); _rtx; })->call)) |
971 | && GET_MODE (y)((machine_mode) (y)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode)))) |
972 | { |
973 | rtx yoff = NULLnullptr; |
974 | rtx yr = autoinc_split (y, &yoff, memmode); |
975 | if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX(rtx) 0) |
976 | return 1; |
977 | } |
978 | |
979 | if (depth == 128) |
980 | return 0; |
981 | |
982 | for (l = e->locs; l; l = l->next) |
983 | { |
984 | rtx t = l->loc; |
985 | |
986 | /* Avoid infinite recursion. We know we have the canonical |
987 | value, so we can just skip any values in the equivalence |
988 | list. */ |
989 | if (REG_P (t)(((enum rtx_code) (t)->code) == REG) || MEM_P (t)(((enum rtx_code) (t)->code) == MEM) || GET_CODE (t)((enum rtx_code) (t)->code) == VALUE) |
990 | continue; |
991 | else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1)) |
992 | return 1; |
993 | } |
994 | |
995 | return 0; |
996 | } |
997 | else if (GET_CODE (y)((enum rtx_code) (y)->code) == VALUE) |
998 | { |
999 | cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y)(((y)->u.fld[0]).rt_cselib)); |
1000 | struct elt_loc_list *l; |
1001 | |
1002 | if ((SP_DERIVED_VALUE_P (y)(__extension__ ({ __typeof ((y)) const _rtx = ((y)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ( "SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1002, __FUNCTION__); _rtx; })->call) |
1003 | || SP_DERIVED_VALUE_P (e->val_rtx)(__extension__ ({ __typeof ((e->val_rtx)) const _rtx = ((e ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1003, __FUNCTION__); _rtx; })->call)) |
1004 | && GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode)))) |
1005 | { |
1006 | rtx xoff = NULLnullptr; |
1007 | rtx xr = autoinc_split (x, &xoff, memmode); |
1008 | if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX(rtx) 0) |
1009 | return 1; |
1010 | } |
1011 | |
1012 | if (depth == 128) |
1013 | return 0; |
1014 | |
1015 | for (l = e->locs; l; l = l->next) |
1016 | { |
1017 | rtx t = l->loc; |
1018 | |
1019 | if (REG_P (t)(((enum rtx_code) (t)->code) == REG) || MEM_P (t)(((enum rtx_code) (t)->code) == MEM) || GET_CODE (t)((enum rtx_code) (t)->code) == VALUE) |
1020 | continue; |
1021 | else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1)) |
1022 | return 1; |
1023 | } |
1024 | |
1025 | return 0; |
1026 | } |
1027 | |
1028 | if (GET_MODE (x)((machine_mode) (x)->mode) != GET_MODE (y)((machine_mode) (y)->mode)) |
1029 | return 0; |
1030 | |
1031 | if (GET_CODE (x)((enum rtx_code) (x)->code) != GET_CODE (y)((enum rtx_code) (y)->code) |
1032 | || (GET_CODE (x)((enum rtx_code) (x)->code) == PLUS |
1033 | && GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))) |
1034 | && CONST_INT_P (XEXP (x, 1))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT ) |
1035 | && CONST_INT_P (XEXP (y, 1))(((enum rtx_code) ((((y)->u.fld[1]).rt_rtx))->code) == CONST_INT ))) |
1036 | { |
1037 | rtx xorig = x, yorig = y; |
1038 | rtx xoff = NULLnullptr, yoff = NULLnullptr; |
1039 | |
1040 | x = autoinc_split (x, &xoff, memmode); |
1041 | y = autoinc_split (y, &yoff, memmode); |
1042 | |
1043 | /* Don't recurse if nothing changed. */ |
1044 | if (x != xorig || y != yorig) |
1045 | { |
1046 | if (!xoff != !yoff) |
1047 | return 0; |
1048 | |
1049 | if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth)) |
1050 | return 0; |
1051 | |
1052 | return rtx_equal_for_cselib_1 (x, y, memmode, depth); |
1053 | } |
1054 | |
1055 | if (GET_CODE (xorig)((enum rtx_code) (xorig)->code) != GET_CODE (yorig)((enum rtx_code) (yorig)->code)) |
1056 | return 0; |
1057 | } |
1058 | |
1059 | /* These won't be handled correctly by the code below. */ |
1060 | switch (GET_CODE (x)((enum rtx_code) (x)->code)) |
1061 | { |
1062 | CASE_CONST_UNIQUEcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case CONST_DOUBLE: case CONST_FIXED: |
1063 | case DEBUG_EXPR: |
1064 | return 0; |
1065 | |
1066 | case CONST_VECTOR: |
1067 | if (!same_vector_encodings_p (x, y)) |
1068 | return false; |
1069 | break; |
1070 | |
1071 | case DEBUG_IMPLICIT_PTR: |
1072 | return DEBUG_IMPLICIT_PTR_DECL (x)(((x)->u.fld[0]).rt_tree) |
1073 | == DEBUG_IMPLICIT_PTR_DECL (y)(((y)->u.fld[0]).rt_tree); |
1074 | |
1075 | case DEBUG_PARAMETER_REF: |
1076 | return DEBUG_PARAMETER_REF_DECL (x)(((x)->u.fld[0]).rt_tree) |
1077 | == DEBUG_PARAMETER_REF_DECL (y)(((y)->u.fld[0]).rt_tree); |
1078 | |
1079 | case ENTRY_VALUE: |
1080 | /* ENTRY_VALUEs are function invariant, it is thus undesirable to |
1081 | use rtx_equal_for_cselib_1 to compare the operands. */ |
1082 | return rtx_equal_p (ENTRY_VALUE_EXP (x)(((x)->u.fld[0]).rt_rtx), ENTRY_VALUE_EXP (y)(((y)->u.fld[0]).rt_rtx)); |
1083 | |
1084 | case LABEL_REF: |
1085 | return label_ref_label (x) == label_ref_label (y); |
1086 | |
1087 | case REG: |
1088 | return REGNO (x)(rhs_regno(x)) == REGNO (y)(rhs_regno(y)); |
1089 | |
1090 | case MEM: |
1091 | /* We have to compare any autoinc operations in the addresses |
1092 | using this MEM's mode. */ |
1093 | return rtx_equal_for_cselib_1 (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), XEXP (y, 0)(((y)->u.fld[0]).rt_rtx), GET_MODE (x)((machine_mode) (x)->mode), |
1094 | depth); |
1095 | |
1096 | default: |
1097 | break; |
1098 | } |
1099 | |
1100 | code = GET_CODE (x)((enum rtx_code) (x)->code); |
1101 | fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]); |
1102 | |
1103 | for (i = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]) - 1; i >= 0; i--) |
1104 | { |
1105 | int j; |
1106 | |
1107 | switch (fmt[i]) |
1108 | { |
1109 | case 'w': |
1110 | if (XWINT (x, i)((x)->u.hwint[i]) != XWINT (y, i)((y)->u.hwint[i])) |
1111 | return 0; |
1112 | break; |
1113 | |
1114 | case 'n': |
1115 | case 'i': |
1116 | if (XINT (x, i)(((x)->u.fld[i]).rt_int) != XINT (y, i)(((y)->u.fld[i]).rt_int)) |
1117 | return 0; |
1118 | break; |
1119 | |
1120 | case 'p': |
1121 | if (maybe_ne (SUBREG_BYTE (x)(((x)->u.fld[1]).rt_subreg), SUBREG_BYTE (y)(((y)->u.fld[1]).rt_subreg))) |
1122 | return 0; |
1123 | break; |
1124 | |
1125 | case 'V': |
1126 | case 'E': |
1127 | /* Two vectors must have the same length. */ |
1128 | if (XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem) != XVECLEN (y, i)(((((y)->u.fld[i]).rt_rtvec))->num_elem)) |
1129 | return 0; |
1130 | |
1131 | /* And the corresponding elements must match. */ |
1132 | for (j = 0; j < XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem); j++) |
1133 | if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]), |
1134 | XVECEXP (y, i, j)(((((y)->u.fld[i]).rt_rtvec))->elem[j]), memmode, depth)) |
1135 | return 0; |
1136 | break; |
1137 | |
1138 | case 'e': |
1139 | if (i == 1 |
1140 | && targetm.commutative_p (x, UNKNOWN) |
1141 | && rtx_equal_for_cselib_1 (XEXP (x, 1)(((x)->u.fld[1]).rt_rtx), XEXP (y, 0)(((y)->u.fld[0]).rt_rtx), memmode, |
1142 | depth) |
1143 | && rtx_equal_for_cselib_1 (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), XEXP (y, 1)(((y)->u.fld[1]).rt_rtx), memmode, |
1144 | depth)) |
1145 | return 1; |
1146 | if (! rtx_equal_for_cselib_1 (XEXP (x, i)(((x)->u.fld[i]).rt_rtx), XEXP (y, i)(((y)->u.fld[i]).rt_rtx), memmode, |
1147 | depth)) |
1148 | return 0; |
1149 | break; |
1150 | |
1151 | case 'S': |
1152 | case 's': |
1153 | if (strcmp (XSTR (x, i)(((x)->u.fld[i]).rt_str), XSTR (y, i)(((y)->u.fld[i]).rt_str))) |
1154 | return 0; |
1155 | break; |
1156 | |
1157 | case 'u': |
1158 | /* These are just backpointers, so they don't matter. */ |
1159 | break; |
1160 | |
1161 | case '0': |
1162 | case 't': |
1163 | break; |
1164 | |
1165 | /* It is believed that rtx's at this level will never |
1166 | contain anything but integers and other rtx's, |
1167 | except for within LABEL_REFs and SYMBOL_REFs. */ |
1168 | default: |
1169 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1169, __FUNCTION__)); |
1170 | } |
1171 | } |
1172 | return 1; |
1173 | } |
1174 | |
1175 | /* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is |
1176 | truly redundant, taking into account aliasing information. */ |
1177 | bool |
1178 | cselib_redundant_set_p (rtx set) |
1179 | { |
1180 | gcc_assert (GET_CODE (set) == SET)((void)(!(((enum rtx_code) (set)->code) == SET) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1180, __FUNCTION__), 0 : 0)); |
1181 | rtx dest = SET_DEST (set)(((set)->u.fld[0]).rt_rtx); |
1182 | if (cselib_reg_set_mode (dest) != GET_MODE (dest)((machine_mode) (dest)->mode)) |
1183 | return false; |
1184 | |
1185 | if (!rtx_equal_for_cselib_p (dest, SET_SRC (set)(((set)->u.fld[1]).rt_rtx))) |
1186 | return false; |
1187 | |
1188 | while (GET_CODE (dest)((enum rtx_code) (dest)->code) == SUBREG |
1189 | || GET_CODE (dest)((enum rtx_code) (dest)->code) == ZERO_EXTRACT |
1190 | || GET_CODE (dest)((enum rtx_code) (dest)->code) == STRICT_LOW_PART) |
1191 | dest = XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx); |
1192 | |
1193 | if (!flag_strict_aliasingglobal_options.x_flag_strict_aliasing || !MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM)) |
1194 | return true; |
1195 | |
1196 | /* For a store we need to check that suppressing it will not change |
1197 | the effective alias set. */ |
1198 | rtx dest_addr = XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx); |
1199 | |
1200 | /* Lookup the equivalents to the original dest (rather than just the |
1201 | MEM). */ |
1202 | cselib_val *src_val = cselib_lookup (SET_DEST (set)(((set)->u.fld[0]).rt_rtx), |
1203 | GET_MODE (SET_DEST (set))((machine_mode) ((((set)->u.fld[0]).rt_rtx))->mode), |
1204 | 0, VOIDmode((void) 0, E_VOIDmode)); |
1205 | |
1206 | if (src_val) |
1207 | { |
1208 | /* Walk the list of source equivalents to find the MEM accessing |
1209 | the same location. */ |
1210 | for (elt_loc_list *l = src_val->locs; l; l = l->next) |
1211 | { |
1212 | rtx src_equiv = l->loc; |
1213 | while (GET_CODE (src_equiv)((enum rtx_code) (src_equiv)->code) == SUBREG |
1214 | || GET_CODE (src_equiv)((enum rtx_code) (src_equiv)->code) == ZERO_EXTRACT |
1215 | || GET_CODE (src_equiv)((enum rtx_code) (src_equiv)->code) == STRICT_LOW_PART) |
1216 | src_equiv = XEXP (src_equiv, 0)(((src_equiv)->u.fld[0]).rt_rtx); |
1217 | |
1218 | if (MEM_P (src_equiv)(((enum rtx_code) (src_equiv)->code) == MEM)) |
1219 | { |
1220 | /* Match the MEMs by comparing the addresses. We can |
1221 | only remove the later store if the earlier aliases at |
1222 | least all the accesses of the later one. */ |
1223 | if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0)(((src_equiv)->u.fld[0]).rt_rtx), |
1224 | GET_MODE (dest)((machine_mode) (dest)->mode), 0)) |
1225 | return mems_same_for_tbaa_p (src_equiv, dest); |
1226 | } |
1227 | } |
1228 | } |
1229 | |
1230 | /* We failed to find a recorded value in the cselib history, so try |
1231 | the source of this set; this catches cases such as *p = *q when p |
1232 | and q have the same value. */ |
1233 | rtx src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx); |
1234 | while (GET_CODE (src)((enum rtx_code) (src)->code) == SUBREG) |
1235 | src = XEXP (src, 0)(((src)->u.fld[0]).rt_rtx); |
1236 | |
1237 | if (MEM_P (src)(((enum rtx_code) (src)->code) == MEM) |
1238 | && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0)(((src)->u.fld[0]).rt_rtx), GET_MODE (dest)((machine_mode) (dest)->mode), 0)) |
1239 | return mems_same_for_tbaa_p (src, dest); |
1240 | |
1241 | return false; |
1242 | } |
1243 | |
1244 | /* Helper function for cselib_hash_rtx. Arguments like for cselib_hash_rtx, |
1245 | except that it hashes (plus:P x c). */ |
1246 | |
1247 | static unsigned int |
1248 | cselib_hash_plus_const_int (rtx x, HOST_WIDE_INTlong c, int create, |
1249 | machine_mode memmode) |
1250 | { |
1251 | cselib_val *e = cselib_lookup (x, GET_MODE (x)((machine_mode) (x)->mode), create, memmode); |
1252 | if (! e) |
1253 | return 0; |
1254 | |
1255 | if (! SP_DERIVED_VALUE_P (e->val_rtx)(__extension__ ({ __typeof ((e->val_rtx)) const _rtx = ((e ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1255, __FUNCTION__); _rtx; })->call)) |
1256 | for (struct elt_loc_list *l = e->locs; l; l = l->next) |
1257 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
1258 | && GET_CODE (XEXP (l->loc, 0))((enum rtx_code) ((((l->loc)->u.fld[0]).rt_rtx))->code ) == VALUE |
1259 | && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))(__extension__ ({ __typeof (((((l->loc)->u.fld[0]).rt_rtx ))) const _rtx = (((((l->loc)->u.fld[0]).rt_rtx))); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1259, __FUNCTION__); _rtx; })->call) |
1260 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
1261 | { |
1262 | e = CSELIB_VAL_PTR (XEXP (l->loc, 0))((((((l->loc)->u.fld[0]).rt_rtx))->u.fld[0]).rt_cselib ); |
1263 | c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1))((unsigned long) (((((l->loc)->u.fld[1]).rt_rtx))->u .hwint[0])), Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode)))); |
1264 | break; |
1265 | } |
1266 | if (c == 0) |
1267 | return e->hash; |
1268 | |
1269 | unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x)((machine_mode) (x)->mode); |
1270 | hash += e->hash; |
1271 | unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode((void) 0, E_VOIDmode); |
1272 | tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INTlong) c; |
1273 | if (tem_hash == 0) |
1274 | tem_hash = (unsigned int) CONST_INT; |
1275 | hash += tem_hash; |
1276 | return hash ? hash : 1 + (unsigned int) PLUS; |
1277 | } |
1278 | |
1279 | /* Hash an rtx. Return 0 if we couldn't hash the rtx. |
1280 | For registers and memory locations, we look up their cselib_val structure |
1281 | and return its VALUE element. |
1282 | Possible reasons for return 0 are: the object is volatile, or we couldn't |
1283 | find a register or memory location in the table and CREATE is zero. If |
1284 | CREATE is nonzero, table elts are created for regs and mem. |
1285 | N.B. this hash function returns the same hash value for RTXes that |
1286 | differ only in the order of operands, thus it is suitable for comparisons |
1287 | that take commutativity into account. |
1288 | If we wanted to also support associative rules, we'd have to use a different |
1289 | strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) . |
1290 | MEMMODE indicates the mode of an enclosing MEM, and it's only |
1291 | used to compute autoinc values. |
1292 | We used to have a MODE argument for hashing for CONST_INTs, but that |
1293 | didn't make sense, since it caused spurious hash differences between |
1294 | (set (reg:SI 1) (const_int)) |
1295 | (plus:SI (reg:SI 2) (reg:SI 1)) |
1296 | and |
1297 | (plus:SI (reg:SI 2) (const_int)) |
1298 | If the mode is important in any context, it must be checked specifically |
1299 | in a comparison anyway, since relying on hash differences is unsafe. */ |
1300 | |
1301 | static unsigned int |
1302 | cselib_hash_rtx (rtx x, int create, machine_mode memmode) |
1303 | { |
1304 | cselib_val *e; |
1305 | poly_int64 offset; |
1306 | int i, j; |
1307 | enum rtx_code code; |
1308 | const char *fmt; |
1309 | unsigned int hash = 0; |
1310 | |
1311 | code = GET_CODE (x)((enum rtx_code) (x)->code); |
1312 | hash += (unsigned) code + (unsigned) GET_MODE (x)((machine_mode) (x)->mode); |
1313 | |
1314 | switch (code) |
1315 | { |
1316 | case VALUE: |
1317 | e = CSELIB_VAL_PTR (x)(((x)->u.fld[0]).rt_cselib); |
1318 | return e->hash; |
1319 | |
1320 | case MEM: |
1321 | case REG: |
1322 | e = cselib_lookup (x, GET_MODE (x)((machine_mode) (x)->mode), create, memmode); |
1323 | if (! e) |
1324 | return 0; |
1325 | |
1326 | return e->hash; |
1327 | |
1328 | case DEBUG_EXPR: |
1329 | hash += ((unsigned) DEBUG_EXPR << 7) |
1330 | + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))(-((contains_struct_check (((tree_check ((((((x)->u.fld[0] ).rt_tree))), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1330, __FUNCTION__, (DEBUG_EXPR_DECL)))), (TS_DECL_MINIMAL) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1330, __FUNCTION__))->decl_minimal.uid)); |
1331 | return hash ? hash : (unsigned int) DEBUG_EXPR; |
1332 | |
1333 | case DEBUG_IMPLICIT_PTR: |
1334 | hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7) |
1335 | + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x))((contains_struct_check (((((x)->u.fld[0]).rt_tree)), (TS_DECL_MINIMAL ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1335, __FUNCTION__))->decl_minimal.uid); |
1336 | return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR; |
1337 | |
1338 | case DEBUG_PARAMETER_REF: |
1339 | hash += ((unsigned) DEBUG_PARAMETER_REF << 7) |
1340 | + DECL_UID (DEBUG_PARAMETER_REF_DECL (x))((contains_struct_check (((((x)->u.fld[0]).rt_tree)), (TS_DECL_MINIMAL ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1340, __FUNCTION__))->decl_minimal.uid); |
1341 | return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF; |
1342 | |
1343 | case ENTRY_VALUE: |
1344 | /* ENTRY_VALUEs are function invariant, thus try to avoid |
1345 | recursing on argument if ENTRY_VALUE is one of the |
1346 | forms emitted by expand_debug_expr, otherwise |
1347 | ENTRY_VALUE hash would depend on the current value |
1348 | in some register or memory. */ |
1349 | if (REG_P (ENTRY_VALUE_EXP (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG )) |
1350 | hash += (unsigned int) REG |
1351 | + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))((machine_mode) ((((x)->u.fld[0]).rt_rtx))->mode) |
1352 | + (unsigned int) REGNO (ENTRY_VALUE_EXP (x))(rhs_regno((((x)->u.fld[0]).rt_rtx))); |
1353 | else if (MEM_P (ENTRY_VALUE_EXP (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == MEM ) |
1354 | && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[ 0]).rt_rtx))->code) == REG)) |
1355 | hash += (unsigned int) MEM |
1356 | + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))((machine_mode) (((((((x)->u.fld[0]).rt_rtx))->u.fld[0] ).rt_rtx))->mode) |
1357 | + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0))(rhs_regno(((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx ))); |
1358 | else |
1359 | hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x)(((x)->u.fld[0]).rt_rtx), create, memmode); |
1360 | return hash ? hash : (unsigned int) ENTRY_VALUE; |
1361 | |
1362 | case CONST_INT: |
1363 | hash += ((unsigned) CONST_INT << 7) + UINTVAL (x)((unsigned long) ((x)->u.hwint[0])); |
1364 | return hash ? hash : (unsigned int) CONST_INT; |
1365 | |
1366 | case CONST_WIDE_INT: |
1367 | for (i = 0; i < CONST_WIDE_INT_NUNITS (x)((int)__extension__ ({ __typeof ((x)) const _rtx = ((x)); if ( ((enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag ("CWI_GET_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1367, __FUNCTION__); _rtx; })->u2.num_elem); i++) |
1368 | hash += CONST_WIDE_INT_ELT (x, i)((x)->u.hwiv.elem[i]); |
1369 | return hash; |
1370 | |
1371 | case CONST_POLY_INT: |
1372 | { |
1373 | inchash::hash h; |
1374 | h.add_int (hash); |
1375 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS1; ++i) |
1376 | h.add_wide_int (CONST_POLY_INT_COEFFS (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum rtx_code) (_rtx)->code) != CONST_POLY_INT) rtl_check_failed_flag ("CONST_POLY_INT_COEFFS", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1376, __FUNCTION__); _rtx; })->u.cpi.coeffs)[i]); |
1377 | return h.end (); |
1378 | } |
1379 | |
1380 | case CONST_DOUBLE: |
1381 | /* This is like the general case, except that it only counts |
1382 | the integers representing the constant. */ |
1383 | hash += (unsigned) code + (unsigned) GET_MODE (x)((machine_mode) (x)->mode); |
1384 | if (TARGET_SUPPORTS_WIDE_INT1 == 0 && GET_MODE (x)((machine_mode) (x)->mode) == VOIDmode((void) 0, E_VOIDmode)) |
1385 | hash += ((unsigned) CONST_DOUBLE_LOW (x)((x)->u.hwint[0]) |
1386 | + (unsigned) CONST_DOUBLE_HIGH (x)((x)->u.hwint[1])); |
1387 | else |
1388 | hash += real_hash (CONST_DOUBLE_REAL_VALUE (x)((const struct real_value *) (&(x)->u.rv))); |
1389 | return hash ? hash : (unsigned int) CONST_DOUBLE; |
1390 | |
1391 | case CONST_FIXED: |
1392 | hash += (unsigned int) code + (unsigned int) GET_MODE (x)((machine_mode) (x)->mode); |
1393 | hash += fixed_hash (CONST_FIXED_VALUE (x)((const struct fixed_value *) (&(x)->u.fv))); |
1394 | return hash ? hash : (unsigned int) CONST_FIXED; |
1395 | |
1396 | case CONST_VECTOR: |
1397 | { |
1398 | int units; |
1399 | rtx elt; |
1400 | |
1401 | units = const_vector_encoded_nelts (x); |
1402 | |
1403 | for (i = 0; i < units; ++i) |
1404 | { |
1405 | elt = CONST_VECTOR_ENCODED_ELT (x, i)(((((x)->u.fld[0]).rt_rtvec))->elem[i]); |
1406 | hash += cselib_hash_rtx (elt, 0, memmode); |
1407 | } |
1408 | |
1409 | return hash; |
1410 | } |
1411 | |
1412 | /* Assume there is only one rtx object for any given label. */ |
1413 | case LABEL_REF: |
1414 | /* We don't hash on the address of the CODE_LABEL to avoid bootstrap |
1415 | differences and differences between each stage's debugging dumps. */ |
1416 | hash += (((unsigned int) LABEL_REF << 7) |
1417 | + CODE_LABEL_NUMBER (label_ref_label (x))(((label_ref_label (x))->u.fld[5]).rt_int)); |
1418 | return hash ? hash : (unsigned int) LABEL_REF; |
1419 | |
1420 | case SYMBOL_REF: |
1421 | { |
1422 | /* Don't hash on the symbol's address to avoid bootstrap differences. |
1423 | Different hash values may cause expressions to be recorded in |
1424 | different orders and thus different registers to be used in the |
1425 | final assembler. This also avoids differences in the dump files |
1426 | between various stages. */ |
1427 | unsigned int h = 0; |
1428 | const unsigned char *p = (const unsigned char *) XSTR (x, 0)(((x)->u.fld[0]).rt_str); |
1429 | |
1430 | while (*p) |
1431 | h += (h << 7) + *p++; /* ??? revisit */ |
1432 | |
1433 | hash += ((unsigned int) SYMBOL_REF << 7) + h; |
1434 | return hash ? hash : (unsigned int) SYMBOL_REF; |
1435 | } |
1436 | |
1437 | case PRE_DEC: |
1438 | case PRE_INC: |
1439 | /* We can't compute these without knowing the MEM mode. */ |
1440 | gcc_assert (memmode != VOIDmode)((void)(!(memmode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1440, __FUNCTION__), 0 : 0)); |
1441 | offset = GET_MODE_SIZE (memmode); |
1442 | if (code == PRE_DEC) |
1443 | offset = -offset; |
1444 | /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes |
1445 | like (mem:MEMMODE (plus (reg) (const_int I))). */ |
1446 | if (GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))) |
1447 | && (REG_P (XEXP (x, 0))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG ) |
1448 | || MEM_P (XEXP (x, 0))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == MEM ) |
1449 | || GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == VALUE)) |
1450 | { |
1451 | HOST_WIDE_INTlong c; |
1452 | if (offset.is_constant (&c)) |
1453 | return cselib_hash_plus_const_int (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), |
1454 | trunc_int_for_mode (c, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode)))), |
1455 | create, memmode); |
1456 | } |
1457 | hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)((machine_mode) (x)->mode) |
1458 | + cselib_hash_rtx (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), create, memmode) |
1459 | + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)((machine_mode) (x)->mode)), |
1460 | create, memmode)); |
1461 | return hash ? hash : 1 + (unsigned) PLUS; |
1462 | |
1463 | case PRE_MODIFY: |
1464 | gcc_assert (memmode != VOIDmode)((void)(!(memmode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1464, __FUNCTION__), 0 : 0)); |
1465 | return cselib_hash_rtx (XEXP (x, 1)(((x)->u.fld[1]).rt_rtx), create, memmode); |
1466 | |
1467 | case POST_DEC: |
1468 | case POST_INC: |
1469 | case POST_MODIFY: |
1470 | gcc_assert (memmode != VOIDmode)((void)(!(memmode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1470, __FUNCTION__), 0 : 0)); |
1471 | return cselib_hash_rtx (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), create, memmode); |
1472 | |
1473 | case PC: |
1474 | case CALL: |
1475 | case UNSPEC_VOLATILE: |
1476 | return 0; |
1477 | |
1478 | case ASM_OPERANDS: |
1479 | if (MEM_VOLATILE_P (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum rtx_code) (_rtx)->code) != MEM && ((enum rtx_code ) (_rtx)->code) != ASM_OPERANDS && ((enum rtx_code ) (_rtx)->code) != ASM_INPUT) rtl_check_failed_flag ("MEM_VOLATILE_P" , _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1479, __FUNCTION__); _rtx; })->volatil)) |
1480 | return 0; |
1481 | |
1482 | break; |
1483 | |
1484 | case PLUS: |
1485 | if (GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))) |
1486 | && (REG_P (XEXP (x, 0))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG ) |
1487 | || MEM_P (XEXP (x, 0))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == MEM ) |
1488 | || GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == VALUE) |
1489 | && CONST_INT_P (XEXP (x, 1))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT )) |
1490 | return cselib_hash_plus_const_int (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), INTVAL (XEXP (x, 1))(((((x)->u.fld[1]).rt_rtx))->u.hwint[0]), |
1491 | create, memmode); |
1492 | break; |
1493 | |
1494 | default: |
1495 | break; |
1496 | } |
1497 | |
1498 | i = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]) - 1; |
1499 | fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]); |
1500 | for (; i >= 0; i--) |
1501 | { |
1502 | switch (fmt[i]) |
1503 | { |
1504 | case 'e': |
1505 | { |
1506 | rtx tem = XEXP (x, i)(((x)->u.fld[i]).rt_rtx); |
1507 | unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode); |
1508 | |
1509 | if (tem_hash == 0) |
1510 | return 0; |
1511 | |
1512 | hash += tem_hash; |
1513 | } |
1514 | break; |
1515 | case 'E': |
1516 | for (j = 0; j < XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem); j++) |
1517 | { |
1518 | unsigned int tem_hash |
1519 | = cselib_hash_rtx (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]), create, memmode); |
1520 | |
1521 | if (tem_hash == 0) |
1522 | return 0; |
1523 | |
1524 | hash += tem_hash; |
1525 | } |
1526 | break; |
1527 | |
1528 | case 's': |
1529 | { |
1530 | const unsigned char *p = (const unsigned char *) XSTR (x, i)(((x)->u.fld[i]).rt_str); |
1531 | |
1532 | if (p) |
1533 | while (*p) |
1534 | hash += *p++; |
1535 | break; |
1536 | } |
1537 | |
1538 | case 'i': |
1539 | hash += XINT (x, i)(((x)->u.fld[i]).rt_int); |
1540 | break; |
1541 | |
1542 | case 'p': |
1543 | hash += constant_lower_bound (SUBREG_BYTE (x)(((x)->u.fld[1]).rt_subreg)); |
1544 | break; |
1545 | |
1546 | case '0': |
1547 | case 't': |
1548 | /* unused */ |
1549 | break; |
1550 | |
1551 | default: |
1552 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1552, __FUNCTION__)); |
1553 | } |
1554 | } |
1555 | |
1556 | return hash ? hash : 1 + (unsigned int) GET_CODE (x)((enum rtx_code) (x)->code); |
1557 | } |
1558 | |
1559 | /* Create a new value structure for VALUE and initialize it. The mode of the |
1560 | value is MODE. */ |
1561 | |
1562 | static inline cselib_val * |
1563 | new_cselib_val (unsigned int hash, machine_mode mode, rtx x) |
1564 | { |
1565 | cselib_val *e = cselib_val_pool.allocate (); |
1566 | |
1567 | gcc_assert (hash)((void)(!(hash) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1567, __FUNCTION__), 0 : 0)); |
1568 | gcc_assert (next_uid)((void)(!(next_uid) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1568, __FUNCTION__), 0 : 0)); |
1569 | |
1570 | e->hash = hash; |
1571 | e->uid = next_uid++; |
1572 | /* We use an alloc pool to allocate this RTL construct because it |
1573 | accounts for about 8% of the overall memory usage. We know |
1574 | precisely when we can have VALUE RTXen (when cselib is active) |
1575 | so we don't need to put them in garbage collected memory. |
1576 | ??? Why should a VALUE be an RTX in the first place? */ |
1577 | e->val_rtx = (rtx_def*) value_pool.allocate (); |
1578 | memset (e->val_rtx, 0, RTX_HDR_SIZE__builtin_offsetof(struct rtx_def, u)); |
1579 | PUT_CODE (e->val_rtx, VALUE)((e->val_rtx)->code = (VALUE)); |
1580 | PUT_MODE (e->val_rtx, mode); |
1581 | CSELIB_VAL_PTR (e->val_rtx)(((e->val_rtx)->u.fld[0]).rt_cselib) = e; |
1582 | e->addr_list = 0; |
1583 | e->locs = 0; |
1584 | e->next_containing_mem = 0; |
1585 | |
1586 | scalar_int_mode int_mode; |
1587 | if (REG_P (x)(((enum rtx_code) (x)->code) == REG) && is_int_mode (mode, &int_mode) |
1588 | && GET_MODE_SIZE (int_mode) > 1 |
1589 | && REG_VALUES (REGNO (x))reg_values[(rhs_regno(x))] != NULLnullptr |
1590 | && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)(((enum rtx_code) (cselib_current_insn)->code) == DEBUG_INSN ))) |
1591 | { |
1592 | rtx copy = shallow_copy_rtx (x); |
1593 | scalar_int_mode narrow_mode_iter; |
1594 | FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)for ((narrow_mode_iter) = (get_narrowest_mode (int_mode)); (narrow_mode_iter ) != (int_mode); mode_iterator::get_known_next (&(narrow_mode_iter ))) |
1595 | { |
1596 | PUT_MODE_RAW (copy, narrow_mode_iter)((copy)->mode = (narrow_mode_iter)); |
1597 | cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode((void) 0, E_VOIDmode)); |
1598 | if (v) |
1599 | { |
1600 | rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode); |
1601 | if (sub) |
1602 | new_elt_loc_list (v, sub); |
1603 | } |
1604 | } |
1605 | } |
1606 | |
1607 | if (dump_file && (dump_flags & TDF_CSELIB)) |
1608 | { |
1609 | fprintf (dump_file, "cselib value %u:%u ", e->uid, hash); |
1610 | if (flag_dump_noaddrglobal_options.x_flag_dump_noaddr || flag_dump_unnumberedglobal_options.x_flag_dump_unnumbered) |
1611 | fputs ("# ", dump_file); |
1612 | else |
1613 | fprintf (dump_file, "%p ", (void*)e); |
1614 | print_rtl_single (dump_file, x); |
1615 | fputc ('\n', dump_file); |
1616 | } |
1617 | |
1618 | return e; |
1619 | } |
1620 | |
1621 | /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that |
1622 | contains the data at this address. X is a MEM that represents the |
1623 | value. Update the two value structures to represent this situation. */ |
1624 | |
1625 | static void |
1626 | add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x) |
1627 | { |
1628 | addr_elt = canonical_cselib_val (addr_elt); |
1629 | mem_elt = canonical_cselib_val (mem_elt); |
1630 | |
1631 | /* Avoid duplicates. */ |
1632 | addr_space_t as = MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace); |
1633 | for (elt_loc_list *l = mem_elt->locs; l; l = l->next) |
1634 | if (MEM_P (l->loc)(((enum rtx_code) (l->loc)->code) == MEM) |
1635 | && CSELIB_VAL_PTR (XEXP (l->loc, 0))((((((l->loc)->u.fld[0]).rt_rtx))->u.fld[0]).rt_cselib ) == addr_elt |
1636 | && MEM_ADDR_SPACE (l->loc)(get_mem_attrs (l->loc)->addrspace) == as) |
1637 | { |
1638 | promote_debug_loc (l); |
1639 | return; |
1640 | } |
1641 | |
1642 | addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt); |
1643 | new_elt_loc_list (mem_elt, |
1644 | replace_equiv_address_nv (x, addr_elt->val_rtx)); |
1645 | if (mem_elt->next_containing_mem == NULLnullptr) |
1646 | { |
1647 | mem_elt->next_containing_mem = first_containing_mem; |
1648 | first_containing_mem = mem_elt; |
1649 | } |
1650 | } |
1651 | |
1652 | /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx. |
1653 | If CREATE, make a new one if we haven't seen it before. */ |
1654 | |
1655 | static cselib_val * |
1656 | cselib_lookup_mem (rtx x, int create) |
1657 | { |
1658 | machine_mode mode = GET_MODE (x)((machine_mode) (x)->mode); |
1659 | machine_mode addr_mode; |
1660 | cselib_val **slot; |
1661 | cselib_val *addr; |
1662 | cselib_val *mem_elt; |
1663 | |
1664 | if (MEM_VOLATILE_P (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum rtx_code) (_rtx)->code) != MEM && ((enum rtx_code ) (_rtx)->code) != ASM_OPERANDS && ((enum rtx_code ) (_rtx)->code) != ASM_INPUT) rtl_check_failed_flag ("MEM_VOLATILE_P" , _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 1664, __FUNCTION__); _rtx; })->volatil) || mode == BLKmode((void) 0, E_BLKmode) |
1665 | || !cselib_record_memory |
1666 | || (FLOAT_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_DECIMAL_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT) && flag_float_storeglobal_options.x_flag_float_store)) |
1667 | return 0; |
1668 | |
1669 | addr_mode = GET_MODE (XEXP (x, 0))((machine_mode) ((((x)->u.fld[0]).rt_rtx))->mode); |
1670 | if (addr_mode == VOIDmode((void) 0, E_VOIDmode)) |
1671 | addr_mode = Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))); |
1672 | |
1673 | /* Look up the value for the address. */ |
1674 | addr = cselib_lookup (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), addr_mode, create, mode); |
1675 | if (! addr) |
1676 | return 0; |
1677 | addr = canonical_cselib_val (addr); |
1678 | |
1679 | /* Find a value that describes a value of our mode at that address. */ |
1680 | addr_space_t as = MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace); |
1681 | for (elt_list *l = addr->addr_list; l; l = l->next) |
1682 | if (GET_MODE (l->elt->val_rtx)((machine_mode) (l->elt->val_rtx)->mode) == mode) |
1683 | { |
1684 | for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next) |
1685 | if (MEM_P (l2->loc)(((enum rtx_code) (l2->loc)->code) == MEM) && MEM_ADDR_SPACE (l2->loc)(get_mem_attrs (l2->loc)->addrspace) == as) |
1686 | { |
1687 | promote_debug_loc (l->elt->locs); |
1688 | return l->elt; |
1689 | } |
1690 | } |
1691 | |
1692 | if (! create) |
1693 | return 0; |
1694 | |
1695 | mem_elt = new_cselib_val (next_uid, mode, x); |
1696 | add_mem_for_addr (addr, mem_elt, x); |
1697 | slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode((void) 0, E_VOIDmode)); |
1698 | *slot = mem_elt; |
1699 | return mem_elt; |
1700 | } |
1701 | |
1702 | /* Search through the possible substitutions in P. We prefer a non reg |
1703 | substitution because this allows us to expand the tree further. If |
1704 | we find, just a reg, take the lowest regno. There may be several |
1705 | non-reg results, we just take the first one because they will all |
1706 | expand to the same place. */ |
1707 | |
1708 | static rtx |
1709 | expand_loc (struct elt_loc_list *p, struct expand_value_data *evd, |
1710 | int max_depth) |
1711 | { |
1712 | rtx reg_result = NULLnullptr; |
1713 | unsigned int regno = UINT_MAX(2147483647 *2U +1U); |
1714 | struct elt_loc_list *p_in = p; |
1715 | |
1716 | for (; p; p = p->next) |
1717 | { |
1718 | /* Return these right away to avoid returning stack pointer based |
1719 | expressions for frame pointer and vice versa, which is something |
1720 | that would confuse DSE. See the comment in cselib_expand_value_rtx_1 |
1721 | for more details. */ |
1722 | if (REG_P (p->loc)(((enum rtx_code) (p->loc)->code) == REG) |
1723 | && (REGNO (p->loc)(rhs_regno(p->loc)) == STACK_POINTER_REGNUM7 |
1724 | || REGNO (p->loc)(rhs_regno(p->loc)) == FRAME_POINTER_REGNUM19 |
1725 | || REGNO (p->loc)(rhs_regno(p->loc)) == HARD_FRAME_POINTER_REGNUM6 |
1726 | || REGNO (p->loc)(rhs_regno(p->loc)) == cfa_base_preserved_regno)) |
1727 | return p->loc; |
1728 | /* Avoid infinite recursion trying to expand a reg into a |
1729 | the same reg. */ |
1730 | if ((REG_P (p->loc)(((enum rtx_code) (p->loc)->code) == REG)) |
1731 | && (REGNO (p->loc)(rhs_regno(p->loc)) < regno) |
1732 | && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)(rhs_regno(p->loc)))) |
1733 | { |
1734 | reg_result = p->loc; |
1735 | regno = REGNO (p->loc)(rhs_regno(p->loc)); |
1736 | } |
1737 | /* Avoid infinite recursion and do not try to expand the |
1738 | value. */ |
1739 | else if (GET_CODE (p->loc)((enum rtx_code) (p->loc)->code) == VALUE |
1740 | && CSELIB_VAL_PTR (p->loc)(((p->loc)->u.fld[0]).rt_cselib)->locs == p_in) |
1741 | continue; |
1742 | else if (!REG_P (p->loc)(((enum rtx_code) (p->loc)->code) == REG)) |
1743 | { |
1744 | rtx result, note; |
1745 | if (dump_file && (dump_flags & TDF_CSELIB)) |
1746 | { |
1747 | print_inline_rtx (dump_file, p->loc, 0); |
1748 | fprintf (dump_file, "\n"); |
1749 | } |
1750 | if (GET_CODE (p->loc)((enum rtx_code) (p->loc)->code) == LO_SUM |
1751 | && GET_CODE (XEXP (p->loc, 1))((enum rtx_code) ((((p->loc)->u.fld[1]).rt_rtx))->code ) == SYMBOL_REF |
1752 | && p->setting_insn |
1753 | && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX(rtx) 0)) |
1754 | && XEXP (note, 0)(((note)->u.fld[0]).rt_rtx) == XEXP (p->loc, 1)(((p->loc)->u.fld[1]).rt_rtx)) |
1755 | return XEXP (p->loc, 1)(((p->loc)->u.fld[1]).rt_rtx); |
1756 | result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1); |
1757 | if (result) |
1758 | return result; |
1759 | } |
1760 | |
1761 | } |
1762 | |
1763 | if (regno != UINT_MAX(2147483647 *2U +1U)) |
1764 | { |
1765 | rtx result; |
1766 | if (dump_file && (dump_flags & TDF_CSELIB)) |
1767 | fprintf (dump_file, "r%d\n", regno); |
1768 | |
1769 | result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1); |
1770 | if (result) |
1771 | return result; |
1772 | } |
1773 | |
1774 | if (dump_file && (dump_flags & TDF_CSELIB)) |
1775 | { |
1776 | if (reg_result) |
1777 | { |
1778 | print_inline_rtx (dump_file, reg_result, 0); |
1779 | fprintf (dump_file, "\n"); |
1780 | } |
1781 | else |
1782 | fprintf (dump_file, "NULL\n"); |
1783 | } |
1784 | return reg_result; |
1785 | } |
1786 | |
1787 | |
1788 | /* Forward substitute and expand an expression out to its roots. |
1789 | This is the opposite of common subexpression. Because local value |
1790 | numbering is such a weak optimization, the expanded expression is |
1791 | pretty much unique (not from a pointer equals point of view but |
1792 | from a tree shape point of view. |
1793 | |
1794 | This function returns NULL if the expansion fails. The expansion |
1795 | will fail if there is no value number for one of the operands or if |
1796 | one of the operands has been overwritten between the current insn |
1797 | and the beginning of the basic block. For instance x has no |
1798 | expansion in: |
1799 | |
1800 | r1 <- r1 + 3 |
1801 | x <- r1 + 8 |
1802 | |
1803 | REGS_ACTIVE is a scratch bitmap that should be clear when passing in. |
1804 | It is clear on return. */ |
1805 | |
1806 | rtx |
1807 | cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth) |
1808 | { |
1809 | struct expand_value_data evd; |
1810 | |
1811 | evd.regs_active = regs_active; |
1812 | evd.callback = NULLnullptr; |
1813 | evd.callback_arg = NULLnullptr; |
1814 | evd.dummy = false; |
1815 | |
1816 | return cselib_expand_value_rtx_1 (orig, &evd, max_depth); |
1817 | } |
1818 | |
1819 | /* Same as cselib_expand_value_rtx, but using a callback to try to |
1820 | resolve some expressions. The CB function should return ORIG if it |
1821 | can't or does not want to deal with a certain RTX. Any other |
1822 | return value, including NULL, will be used as the expansion for |
1823 | VALUE, without any further changes. */ |
1824 | |
1825 | rtx |
1826 | cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth, |
1827 | cselib_expand_callback cb, void *data) |
1828 | { |
1829 | struct expand_value_data evd; |
1830 | |
1831 | evd.regs_active = regs_active; |
1832 | evd.callback = cb; |
1833 | evd.callback_arg = data; |
1834 | evd.dummy = false; |
1835 | |
1836 | return cselib_expand_value_rtx_1 (orig, &evd, max_depth); |
1837 | } |
1838 | |
1839 | /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied |
1840 | or simplified. Useful to find out whether cselib_expand_value_rtx_cb |
1841 | would return NULL or non-NULL, without allocating new rtx. */ |
1842 | |
1843 | bool |
1844 | cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth, |
1845 | cselib_expand_callback cb, void *data) |
1846 | { |
1847 | struct expand_value_data evd; |
1848 | |
1849 | evd.regs_active = regs_active; |
1850 | evd.callback = cb; |
1851 | evd.callback_arg = data; |
1852 | evd.dummy = true; |
1853 | |
1854 | return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULLnullptr; |
1855 | } |
1856 | |
1857 | /* Internal implementation of cselib_expand_value_rtx and |
1858 | cselib_expand_value_rtx_cb. */ |
1859 | |
1860 | static rtx |
1861 | cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd, |
1862 | int max_depth) |
1863 | { |
1864 | rtx copy, scopy; |
1865 | int i, j; |
1866 | RTX_CODEenum rtx_code code; |
1867 | const char *format_ptr; |
1868 | machine_mode mode; |
1869 | |
1870 | code = GET_CODE (orig)((enum rtx_code) (orig)->code); |
1871 | |
1872 | /* For the context of dse, if we end up expand into a huge tree, we |
1873 | will not have a useful address, so we might as well just give up |
1874 | quickly. */ |
1875 | if (max_depth <= 0) |
1876 | return NULLnullptr; |
1877 | |
1878 | switch (code) |
1879 | { |
1880 | case REG: |
1881 | { |
1882 | struct elt_list *l = REG_VALUES (REGNO (orig))reg_values[(rhs_regno(orig))]; |
1883 | |
1884 | if (l && l->elt == NULLnullptr) |
1885 | l = l->next; |
1886 | for (; l; l = l->next) |
1887 | if (GET_MODE (l->elt->val_rtx)((machine_mode) (l->elt->val_rtx)->mode) == GET_MODE (orig)((machine_mode) (orig)->mode)) |
1888 | { |
1889 | rtx result; |
1890 | unsigned regno = REGNO (orig)(rhs_regno(orig)); |
1891 | |
1892 | /* The only thing that we are not willing to do (this |
1893 | is requirement of dse and if others potential uses |
1894 | need this function we should add a parm to control |
1895 | it) is that we will not substitute the |
1896 | STACK_POINTER_REGNUM, FRAME_POINTER or the |
1897 | HARD_FRAME_POINTER. |
1898 | |
1899 | These expansions confuses the code that notices that |
1900 | stores into the frame go dead at the end of the |
1901 | function and that the frame is not effected by calls |
1902 | to subroutines. If you allow the |
1903 | STACK_POINTER_REGNUM substitution, then dse will |
1904 | think that parameter pushing also goes dead which is |
1905 | wrong. If you allow the FRAME_POINTER or the |
1906 | HARD_FRAME_POINTER then you lose the opportunity to |
1907 | make the frame assumptions. */ |
1908 | if (regno == STACK_POINTER_REGNUM7 |
1909 | || regno == FRAME_POINTER_REGNUM19 |
1910 | || regno == HARD_FRAME_POINTER_REGNUM6 |
1911 | || regno == cfa_base_preserved_regno) |
1912 | return orig; |
1913 | |
1914 | bitmap_set_bit (evd->regs_active, regno); |
1915 | |
1916 | if (dump_file && (dump_flags & TDF_CSELIB)) |
1917 | fprintf (dump_file, "expanding: r%d into: ", regno); |
1918 | |
1919 | result = expand_loc (l->elt->locs, evd, max_depth); |
1920 | bitmap_clear_bit (evd->regs_active, regno); |
1921 | |
1922 | if (result) |
1923 | return result; |
1924 | else |
1925 | return orig; |
1926 | } |
1927 | return orig; |
1928 | } |
1929 | |
1930 | CASE_CONST_ANYcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR: |
1931 | case SYMBOL_REF: |
1932 | case CODE_LABEL: |
1933 | case PC: |
1934 | case SCRATCH: |
1935 | /* SCRATCH must be shared because they represent distinct values. */ |
1936 | return orig; |
1937 | case CLOBBER: |
1938 | if (REG_P (XEXP (orig, 0))(((enum rtx_code) ((((orig)->u.fld[0]).rt_rtx))->code) == REG) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))(((rhs_regno((((orig)->u.fld[0]).rt_rtx)))) < 76)) |
1939 | return orig; |
1940 | break; |
1941 | |
1942 | case CONST: |
1943 | if (shared_const_p (orig)) |
1944 | return orig; |
1945 | break; |
1946 | |
1947 | case SUBREG: |
1948 | { |
1949 | rtx subreg; |
1950 | |
1951 | if (evd->callback) |
1952 | { |
1953 | subreg = evd->callback (orig, evd->regs_active, max_depth, |
1954 | evd->callback_arg); |
1955 | if (subreg != orig) |
1956 | return subreg; |
1957 | } |
1958 | |
1959 | subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig)(((orig)->u.fld[0]).rt_rtx), evd, |
1960 | max_depth - 1); |
1961 | if (!subreg) |
1962 | return NULLnullptr; |
1963 | scopy = simplify_gen_subreg (GET_MODE (orig)((machine_mode) (orig)->mode), subreg, |
1964 | GET_MODE (SUBREG_REG (orig))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode), |
1965 | SUBREG_BYTE (orig)(((orig)->u.fld[1]).rt_subreg)); |
1966 | if (scopy == NULLnullptr |
1967 | || (GET_CODE (scopy)((enum rtx_code) (scopy)->code) == SUBREG |
1968 | && !REG_P (SUBREG_REG (scopy))(((enum rtx_code) ((((scopy)->u.fld[0]).rt_rtx))->code) == REG) |
1969 | && !MEM_P (SUBREG_REG (scopy))(((enum rtx_code) ((((scopy)->u.fld[0]).rt_rtx))->code) == MEM))) |
1970 | return NULLnullptr; |
1971 | |
1972 | return scopy; |
1973 | } |
1974 | |
1975 | case VALUE: |
1976 | { |
1977 | rtx result; |
1978 | |
1979 | if (dump_file && (dump_flags & TDF_CSELIB)) |
1980 | { |
1981 | fputs ("\nexpanding ", dump_file); |
1982 | print_rtl_single (dump_file, orig); |
1983 | fputs (" into...", dump_file); |
1984 | } |
1985 | |
1986 | if (evd->callback) |
1987 | { |
1988 | result = evd->callback (orig, evd->regs_active, max_depth, |
1989 | evd->callback_arg); |
1990 | |
1991 | if (result != orig) |
1992 | return result; |
1993 | } |
1994 | |
1995 | result = expand_loc (CSELIB_VAL_PTR (orig)(((orig)->u.fld[0]).rt_cselib)->locs, evd, max_depth); |
1996 | return result; |
1997 | } |
1998 | |
1999 | case DEBUG_EXPR: |
2000 | if (evd->callback) |
2001 | return evd->callback (orig, evd->regs_active, max_depth, |
2002 | evd->callback_arg); |
2003 | return orig; |
2004 | |
2005 | default: |
2006 | break; |
2007 | } |
2008 | |
2009 | /* Copy the various flags, fields, and other information. We assume |
2010 | that all fields need copying, and then clear the fields that should |
2011 | not be copied. That is the sensible default behavior, and forces |
2012 | us to explicitly document why we are *not* copying a flag. */ |
2013 | if (evd->dummy) |
2014 | copy = NULLnullptr; |
2015 | else |
2016 | copy = shallow_copy_rtx (orig); |
2017 | |
2018 | format_ptr = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]); |
2019 | |
2020 | for (i = 0; i < GET_RTX_LENGTH (code)(rtx_length[(int) (code)]); i++) |
2021 | switch (*format_ptr++) |
2022 | { |
2023 | case 'e': |
2024 | if (XEXP (orig, i)(((orig)->u.fld[i]).rt_rtx) != NULLnullptr) |
2025 | { |
2026 | rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i)(((orig)->u.fld[i]).rt_rtx), evd, |
2027 | max_depth - 1); |
2028 | if (!result) |
2029 | return NULLnullptr; |
2030 | if (copy) |
2031 | XEXP (copy, i)(((copy)->u.fld[i]).rt_rtx) = result; |
2032 | } |
2033 | break; |
2034 | |
2035 | case 'E': |
2036 | case 'V': |
2037 | if (XVEC (orig, i)(((orig)->u.fld[i]).rt_rtvec) != NULLnullptr) |
2038 | { |
2039 | if (copy) |
2040 | XVEC (copy, i)(((copy)->u.fld[i]).rt_rtvec) = rtvec_alloc (XVECLEN (orig, i)(((((orig)->u.fld[i]).rt_rtvec))->num_elem)); |
2041 | for (j = 0; j < XVECLEN (orig, i)(((((orig)->u.fld[i]).rt_rtvec))->num_elem); j++) |
2042 | { |
2043 | rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j)(((((orig)->u.fld[i]).rt_rtvec))->elem[j]), |
2044 | evd, max_depth - 1); |
2045 | if (!result) |
2046 | return NULLnullptr; |
2047 | if (copy) |
2048 | XVECEXP (copy, i, j)(((((copy)->u.fld[i]).rt_rtvec))->elem[j]) = result; |
2049 | } |
2050 | } |
2051 | break; |
2052 | |
2053 | case 't': |
2054 | case 'w': |
2055 | case 'i': |
2056 | case 's': |
2057 | case 'S': |
2058 | case 'T': |
2059 | case 'u': |
2060 | case 'B': |
2061 | case '0': |
2062 | /* These are left unchanged. */ |
2063 | break; |
2064 | |
2065 | default: |
2066 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2066, __FUNCTION__)); |
2067 | } |
2068 | |
2069 | if (evd->dummy) |
2070 | return orig; |
2071 | |
2072 | mode = GET_MODE (copy)((machine_mode) (copy)->mode); |
2073 | /* If an operand has been simplified into CONST_INT, which doesn't |
2074 | have a mode and the mode isn't derivable from whole rtx's mode, |
2075 | try simplify_*_operation first with mode from original's operand |
2076 | and as a fallback wrap CONST_INT into gen_rtx_CONST. */ |
2077 | scopy = copy; |
2078 | switch (GET_RTX_CLASS (code)(rtx_class[(int) (code)])) |
2079 | { |
2080 | case RTX_UNARY: |
2081 | if (CONST_INT_P (XEXP (copy, 0))(((enum rtx_code) ((((copy)->u.fld[0]).rt_rtx))->code) == CONST_INT) |
2082 | && GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode) != VOIDmode((void) 0, E_VOIDmode)) |
2083 | { |
2084 | scopy = simplify_unary_operation (code, mode, XEXP (copy, 0)(((copy)->u.fld[0]).rt_rtx), |
2085 | GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode)); |
2086 | if (scopy) |
2087 | return scopy; |
2088 | } |
2089 | break; |
2090 | case RTX_COMM_ARITH: |
2091 | case RTX_BIN_ARITH: |
2092 | /* These expressions can derive operand modes from the whole rtx's mode. */ |
2093 | break; |
2094 | case RTX_TERNARY: |
2095 | case RTX_BITFIELD_OPS: |
2096 | if (CONST_INT_P (XEXP (copy, 0))(((enum rtx_code) ((((copy)->u.fld[0]).rt_rtx))->code) == CONST_INT) |
2097 | && GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode) != VOIDmode((void) 0, E_VOIDmode)) |
2098 | { |
2099 | scopy = simplify_ternary_operation (code, mode, |
2100 | GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode), |
2101 | XEXP (copy, 0)(((copy)->u.fld[0]).rt_rtx), XEXP (copy, 1)(((copy)->u.fld[1]).rt_rtx), |
2102 | XEXP (copy, 2)(((copy)->u.fld[2]).rt_rtx)); |
2103 | if (scopy) |
2104 | return scopy; |
2105 | } |
2106 | break; |
2107 | case RTX_COMPARE: |
2108 | case RTX_COMM_COMPARE: |
2109 | if (CONST_INT_P (XEXP (copy, 0))(((enum rtx_code) ((((copy)->u.fld[0]).rt_rtx))->code) == CONST_INT) |
2110 | && GET_MODE (XEXP (copy, 1))((machine_mode) ((((copy)->u.fld[1]).rt_rtx))->mode) == VOIDmode((void) 0, E_VOIDmode) |
2111 | && (GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode) != VOIDmode((void) 0, E_VOIDmode) |
2112 | || GET_MODE (XEXP (orig, 1))((machine_mode) ((((orig)->u.fld[1]).rt_rtx))->mode) != VOIDmode((void) 0, E_VOIDmode))) |
2113 | { |
2114 | scopy = simplify_relational_operation (code, mode, |
2115 | (GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode) |
2116 | != VOIDmode((void) 0, E_VOIDmode)) |
2117 | ? GET_MODE (XEXP (orig, 0))((machine_mode) ((((orig)->u.fld[0]).rt_rtx))->mode) |
2118 | : GET_MODE (XEXP (orig, 1))((machine_mode) ((((orig)->u.fld[1]).rt_rtx))->mode), |
2119 | XEXP (copy, 0)(((copy)->u.fld[0]).rt_rtx), |
2120 | XEXP (copy, 1)(((copy)->u.fld[1]).rt_rtx)); |
2121 | if (scopy) |
2122 | return scopy; |
2123 | } |
2124 | break; |
2125 | default: |
2126 | break; |
2127 | } |
2128 | scopy = simplify_rtx (copy); |
2129 | if (scopy) |
2130 | return scopy; |
2131 | return copy; |
2132 | } |
2133 | |
2134 | /* Walk rtx X and replace all occurrences of REG and MEM subexpressions |
2135 | with VALUE expressions. This way, it becomes independent of changes |
2136 | to registers and memory. |
2137 | X isn't actually modified; if modifications are needed, new rtl is |
2138 | allocated. However, the return value can share rtl with X. |
2139 | If X is within a MEM, MEMMODE must be the mode of the MEM. */ |
2140 | |
2141 | rtx |
2142 | cselib_subst_to_values (rtx x, machine_mode memmode) |
2143 | { |
2144 | enum rtx_code code = GET_CODE (x)((enum rtx_code) (x)->code); |
2145 | const char *fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]); |
2146 | cselib_val *e; |
2147 | struct elt_list *l; |
2148 | rtx copy = x; |
2149 | int i; |
2150 | poly_int64 offset; |
2151 | |
2152 | switch (code) |
2153 | { |
2154 | case REG: |
2155 | l = REG_VALUES (REGNO (x))reg_values[(rhs_regno(x))]; |
2156 | if (l && l->elt == NULLnullptr) |
2157 | l = l->next; |
2158 | for (; l; l = l->next) |
2159 | if (GET_MODE (l->elt->val_rtx)((machine_mode) (l->elt->val_rtx)->mode) == GET_MODE (x)((machine_mode) (x)->mode)) |
2160 | return l->elt->val_rtx; |
2161 | |
2162 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2162, __FUNCTION__)); |
2163 | |
2164 | case MEM: |
2165 | e = cselib_lookup_mem (x, 0); |
2166 | /* This used to happen for autoincrements, but we deal with them |
2167 | properly now. Remove the if stmt for the next release. */ |
2168 | if (! e) |
2169 | { |
2170 | /* Assign a value that doesn't match any other. */ |
2171 | e = new_cselib_val (next_uid, GET_MODE (x)((machine_mode) (x)->mode), x); |
2172 | } |
2173 | return e->val_rtx; |
2174 | |
2175 | case ENTRY_VALUE: |
2176 | e = cselib_lookup (x, GET_MODE (x)((machine_mode) (x)->mode), 0, memmode); |
2177 | if (! e) |
2178 | break; |
2179 | return e->val_rtx; |
2180 | |
2181 | CASE_CONST_ANYcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR: |
2182 | return x; |
2183 | |
2184 | case PRE_DEC: |
2185 | case PRE_INC: |
2186 | gcc_assert (memmode != VOIDmode)((void)(!(memmode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2186, __FUNCTION__), 0 : 0)); |
2187 | offset = GET_MODE_SIZE (memmode); |
2188 | if (code == PRE_DEC) |
2189 | offset = -offset; |
2190 | return cselib_subst_to_values (plus_constant (GET_MODE (x)((machine_mode) (x)->mode), |
2191 | XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), offset), |
2192 | memmode); |
2193 | |
2194 | case PRE_MODIFY: |
2195 | gcc_assert (memmode != VOIDmode)((void)(!(memmode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2195, __FUNCTION__), 0 : 0)); |
2196 | return cselib_subst_to_values (XEXP (x, 1)(((x)->u.fld[1]).rt_rtx), memmode); |
2197 | |
2198 | case POST_DEC: |
2199 | case POST_INC: |
2200 | case POST_MODIFY: |
2201 | gcc_assert (memmode != VOIDmode)((void)(!(memmode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2201, __FUNCTION__), 0 : 0)); |
2202 | return cselib_subst_to_values (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), memmode); |
2203 | |
2204 | case PLUS: |
2205 | if (GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))) && CONST_INT_P (XEXP (x, 1))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT )) |
2206 | { |
2207 | rtx t = cselib_subst_to_values (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), memmode); |
2208 | if (GET_CODE (t)((enum rtx_code) (t)->code) == VALUE) |
2209 | { |
2210 | if (SP_DERIVED_VALUE_P (t)(__extension__ ({ __typeof ((t)) const _rtx = ((t)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ( "SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2210, __FUNCTION__); _rtx; })->call) && XEXP (x, 1)(((x)->u.fld[1]).rt_rtx) == const0_rtx(const_int_rtx[64])) |
2211 | return t; |
2212 | for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)(((t)->u.fld[0]).rt_cselib)->locs; |
2213 | l; l = l->next) |
2214 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
2215 | && GET_CODE (XEXP (l->loc, 0))((enum rtx_code) ((((l->loc)->u.fld[0]).rt_rtx))->code ) == VALUE |
2216 | && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))(__extension__ ({ __typeof (((((l->loc)->u.fld[0]).rt_rtx ))) const _rtx = (((((l->loc)->u.fld[0]).rt_rtx))); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2216, __FUNCTION__); _rtx; })->call) |
2217 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
2218 | return plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), l->loc, INTVAL (XEXP (x, 1))(((((x)->u.fld[1]).rt_rtx))->u.hwint[0])); |
2219 | } |
2220 | if (t != XEXP (x, 0)(((x)->u.fld[0]).rt_rtx)) |
2221 | { |
2222 | copy = shallow_copy_rtx (x); |
2223 | XEXP (copy, 0)(((copy)->u.fld[0]).rt_rtx) = t; |
2224 | } |
2225 | return copy; |
2226 | } |
2227 | |
2228 | default: |
2229 | break; |
2230 | } |
2231 | |
2232 | for (i = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]) - 1; i >= 0; i--) |
2233 | { |
2234 | if (fmt[i] == 'e') |
2235 | { |
2236 | rtx t = cselib_subst_to_values (XEXP (x, i)(((x)->u.fld[i]).rt_rtx), memmode); |
2237 | |
2238 | if (t != XEXP (x, i)(((x)->u.fld[i]).rt_rtx)) |
2239 | { |
2240 | if (x == copy) |
2241 | copy = shallow_copy_rtx (x); |
2242 | XEXP (copy, i)(((copy)->u.fld[i]).rt_rtx) = t; |
2243 | } |
2244 | } |
2245 | else if (fmt[i] == 'E') |
2246 | { |
2247 | int j; |
2248 | |
2249 | for (j = 0; j < XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem); j++) |
2250 | { |
2251 | rtx t = cselib_subst_to_values (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]), memmode); |
2252 | |
2253 | if (t != XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j])) |
2254 | { |
2255 | if (XVEC (x, i)(((x)->u.fld[i]).rt_rtvec) == XVEC (copy, i)(((copy)->u.fld[i]).rt_rtvec)) |
2256 | { |
2257 | if (x == copy) |
2258 | copy = shallow_copy_rtx (x); |
2259 | XVEC (copy, i)(((copy)->u.fld[i]).rt_rtvec) = shallow_copy_rtvec (XVEC (x, i)(((x)->u.fld[i]).rt_rtvec)); |
2260 | } |
2261 | XVECEXP (copy, i, j)(((((copy)->u.fld[i]).rt_rtvec))->elem[j]) = t; |
2262 | } |
2263 | } |
2264 | } |
2265 | } |
2266 | |
2267 | return copy; |
2268 | } |
2269 | |
2270 | /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */ |
2271 | |
2272 | rtx |
2273 | cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn) |
2274 | { |
2275 | rtx ret; |
2276 | gcc_assert (!cselib_current_insn)((void)(!(!cselib_current_insn) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2276, __FUNCTION__), 0 : 0)); |
2277 | cselib_current_insn = insn; |
2278 | ret = cselib_subst_to_values (x, memmode); |
2279 | cselib_current_insn = NULLnullptr; |
2280 | return ret; |
2281 | } |
2282 | |
2283 | /* Look up the rtl expression X in our tables and return the value it |
2284 | has. If CREATE is zero, we return NULL if we don't know the value. |
2285 | Otherwise, we create a new one if possible, using mode MODE if X |
2286 | doesn't have a mode (i.e. because it's a constant). When X is part |
2287 | of an address, MEMMODE should be the mode of the enclosing MEM if |
2288 | we're tracking autoinc expressions. */ |
2289 | |
2290 | static cselib_val * |
2291 | cselib_lookup_1 (rtx x, machine_mode mode, |
2292 | int create, machine_mode memmode) |
2293 | { |
2294 | cselib_val **slot; |
2295 | cselib_val *e; |
2296 | unsigned int hashval; |
2297 | |
2298 | if (GET_MODE (x)((machine_mode) (x)->mode) != VOIDmode((void) 0, E_VOIDmode)) |
2299 | mode = GET_MODE (x)((machine_mode) (x)->mode); |
2300 | |
2301 | if (GET_CODE (x)((enum rtx_code) (x)->code) == VALUE) |
2302 | return CSELIB_VAL_PTR (x)(((x)->u.fld[0]).rt_cselib); |
2303 | |
2304 | if (REG_P (x)(((enum rtx_code) (x)->code) == REG)) |
2305 | { |
2306 | struct elt_list *l; |
2307 | unsigned int i = REGNO (x)(rhs_regno(x)); |
2308 | |
2309 | l = REG_VALUES (i)reg_values[i]; |
2310 | if (l && l->elt == NULLnullptr) |
2311 | l = l->next; |
2312 | for (; l; l = l->next) |
2313 | if (mode == GET_MODE (l->elt->val_rtx)((machine_mode) (l->elt->val_rtx)->mode)) |
2314 | { |
2315 | promote_debug_loc (l->elt->locs); |
2316 | return l->elt; |
2317 | } |
2318 | |
2319 | if (! create) |
2320 | return 0; |
2321 | |
2322 | if (i < FIRST_PSEUDO_REGISTER76) |
2323 | { |
2324 | unsigned int n = hard_regno_nregs (i, mode); |
2325 | |
2326 | if (n > max_value_regs) |
2327 | max_value_regs = n; |
2328 | } |
2329 | |
2330 | e = new_cselib_val (next_uid, GET_MODE (x)((machine_mode) (x)->mode), x); |
2331 | if (GET_MODE (x)((machine_mode) (x)->mode) == Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))) && x == stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER])) |
2332 | SP_DERIVED_VALUE_P (e->val_rtx)(__extension__ ({ __typeof ((e->val_rtx)) const _rtx = ((e ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2332, __FUNCTION__); _rtx; })->call) = 1; |
2333 | new_elt_loc_list (e, x); |
2334 | |
2335 | scalar_int_mode int_mode; |
2336 | if (REG_VALUES (i)reg_values[i] == 0) |
2337 | { |
2338 | /* Maintain the invariant that the first entry of |
2339 | REG_VALUES, if present, must be the value used to set the |
2340 | register, or NULL. */ |
2341 | used_regs[n_used_regs++] = i; |
2342 | REG_VALUES (i)reg_values[i] = new_elt_list (REG_VALUES (i)reg_values[i], NULLnullptr); |
2343 | } |
2344 | else if (cselib_preserve_constants |
2345 | && is_int_mode (mode, &int_mode)) |
2346 | { |
2347 | /* During var-tracking, try harder to find equivalences |
2348 | for SUBREGs. If a setter sets say a DImode register |
2349 | and user uses that register only in SImode, add a lowpart |
2350 | subreg location. */ |
2351 | struct elt_list *lwider = NULLnullptr; |
2352 | scalar_int_mode lmode; |
2353 | l = REG_VALUES (i)reg_values[i]; |
2354 | if (l && l->elt == NULLnullptr) |
2355 | l = l->next; |
2356 | for (; l; l = l->next) |
2357 | if (is_int_mode (GET_MODE (l->elt->val_rtx)((machine_mode) (l->elt->val_rtx)->mode), &lmode) |
2358 | && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode) |
2359 | && (lwider == NULLnullptr |
2360 | || partial_subreg_p (lmode, |
2361 | GET_MODE (lwider->elt->val_rtx)((machine_mode) (lwider->elt->val_rtx)->mode)))) |
2362 | { |
2363 | struct elt_loc_list *el; |
2364 | if (i < FIRST_PSEUDO_REGISTER76 |
2365 | && hard_regno_nregs (i, lmode) != 1) |
2366 | continue; |
2367 | for (el = l->elt->locs; el; el = el->next) |
2368 | if (!REG_P (el->loc)(((enum rtx_code) (el->loc)->code) == REG)) |
2369 | break; |
2370 | if (el) |
2371 | lwider = l; |
2372 | } |
2373 | if (lwider) |
2374 | { |
2375 | rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx, |
2376 | GET_MODE (lwider->elt->val_rtx)((machine_mode) (lwider->elt->val_rtx)->mode)); |
2377 | if (sub) |
2378 | new_elt_loc_list (e, sub); |
2379 | } |
2380 | } |
2381 | REG_VALUES (i)reg_values[i]->next = new_elt_list (REG_VALUES (i)reg_values[i]->next, e); |
2382 | slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode); |
2383 | *slot = e; |
2384 | return e; |
2385 | } |
2386 | |
2387 | if (MEM_P (x)(((enum rtx_code) (x)->code) == MEM)) |
2388 | return cselib_lookup_mem (x, create); |
2389 | |
2390 | hashval = cselib_hash_rtx (x, create, memmode); |
2391 | /* Can't even create if hashing is not possible. */ |
2392 | if (! hashval) |
2393 | return 0; |
2394 | |
2395 | slot = cselib_find_slot (mode, x, hashval, |
2396 | create ? INSERT : NO_INSERT, memmode); |
2397 | if (slot == 0) |
2398 | return 0; |
2399 | |
2400 | e = (cselib_val *) *slot; |
2401 | if (e) |
2402 | return e; |
2403 | |
2404 | e = new_cselib_val (hashval, mode, x); |
2405 | |
2406 | /* We have to fill the slot before calling cselib_subst_to_values: |
2407 | the hash table is inconsistent until we do so, and |
2408 | cselib_subst_to_values will need to do lookups. */ |
2409 | *slot = e; |
2410 | rtx v = cselib_subst_to_values (x, memmode); |
2411 | |
2412 | /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P |
2413 | VALUE that isn't in the hash tables anymore. */ |
2414 | if (GET_CODE (v)((enum rtx_code) (v)->code) == VALUE && SP_DERIVED_VALUE_P (v)(__extension__ ({ __typeof ((v)) const _rtx = ((v)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ( "SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2414, __FUNCTION__); _rtx; })->call) && PRESERVED_VALUE_P (v)(__extension__ ({ __typeof ((v)) const _rtx = ((v)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ( "PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2414, __FUNCTION__); _rtx; })->unchanging)) |
2415 | PRESERVED_VALUE_P (e->val_rtx)(__extension__ ({ __typeof ((e->val_rtx)) const _rtx = ((e ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2415, __FUNCTION__); _rtx; })->unchanging) = 1; |
2416 | |
2417 | new_elt_loc_list (e, v); |
2418 | return e; |
2419 | } |
2420 | |
2421 | /* Wrapper for cselib_lookup, that indicates X is in INSN. */ |
2422 | |
2423 | cselib_val * |
2424 | cselib_lookup_from_insn (rtx x, machine_mode mode, |
2425 | int create, machine_mode memmode, rtx_insn *insn) |
2426 | { |
2427 | cselib_val *ret; |
2428 | |
2429 | gcc_assert (!cselib_current_insn)((void)(!(!cselib_current_insn) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2429, __FUNCTION__), 0 : 0)); |
2430 | cselib_current_insn = insn; |
2431 | |
2432 | ret = cselib_lookup (x, mode, create, memmode); |
2433 | |
2434 | cselib_current_insn = NULLnullptr; |
2435 | |
2436 | return ret; |
2437 | } |
2438 | |
2439 | /* Wrapper for cselib_lookup_1, that logs the lookup result and |
2440 | maintains invariants related with debug insns. */ |
2441 | |
2442 | cselib_val * |
2443 | cselib_lookup (rtx x, machine_mode mode, |
2444 | int create, machine_mode memmode) |
2445 | { |
2446 | cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode); |
2447 | |
2448 | /* ??? Should we return NULL if we're not to create an entry, the |
2449 | found loc is a debug loc and cselib_current_insn is not DEBUG? |
2450 | If so, we should also avoid converting val to non-DEBUG; probably |
2451 | easiest setting cselib_current_insn to NULL before the call |
2452 | above. */ |
2453 | |
2454 | if (dump_file && (dump_flags & TDF_CSELIB)) |
2455 | { |
2456 | fputs ("cselib lookup ", dump_file); |
2457 | print_inline_rtx (dump_file, x, 2); |
2458 | fprintf (dump_file, " => %u:%u\n", |
2459 | ret ? ret->uid : 0, |
2460 | ret ? ret->hash : 0); |
2461 | } |
2462 | |
2463 | return ret; |
2464 | } |
2465 | |
2466 | /* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */ |
2467 | |
2468 | static void |
2469 | cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l) |
2470 | { |
2471 | cselib_val *v = (*l)->elt; |
2472 | if (*l == REG_VALUES (regno)reg_values[regno]) |
2473 | { |
2474 | /* Maintain the invariant that the first entry of |
2475 | REG_VALUES, if present, must be the value used to set |
2476 | the register, or NULL. This is also nice because |
2477 | then we won't push the same regno onto user_regs |
2478 | multiple times. */ |
2479 | (*l)->elt = NULLnullptr; |
2480 | l = &(*l)->next; |
Value stored to 'l' is never read | |
2481 | } |
2482 | else |
2483 | unchain_one_elt_list (l); |
2484 | |
2485 | v = canonical_cselib_val (v); |
2486 | |
2487 | bool had_locs = v->locs != NULLnullptr; |
2488 | rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULLnullptr; |
2489 | |
2490 | /* Now, we clear the mapping from value to reg. It must exist, so |
2491 | this code will crash intentionally if it doesn't. */ |
2492 | for (elt_loc_list **p = &v->locs; ; p = &(*p)->next) |
2493 | { |
2494 | rtx x = (*p)->loc; |
2495 | |
2496 | if (REG_P (x)(((enum rtx_code) (x)->code) == REG) && REGNO (x)(rhs_regno(x)) == regno) |
2497 | { |
2498 | unchain_one_elt_loc_list (p); |
2499 | break; |
2500 | } |
2501 | } |
2502 | |
2503 | if (had_locs && cselib_useless_value_p (v)) |
2504 | { |
2505 | if (setting_insn && DEBUG_INSN_P (setting_insn)(((enum rtx_code) (setting_insn)->code) == DEBUG_INSN)) |
2506 | n_useless_debug_values++; |
2507 | else |
2508 | n_useless_values++; |
2509 | } |
2510 | } |
2511 | |
2512 | /* Invalidate any entries in reg_values that overlap REGNO. This is called |
2513 | if REGNO is changing. MODE is the mode of the assignment to REGNO, which |
2514 | is used to determine how many hard registers are being changed. If MODE |
2515 | is VOIDmode, then only REGNO is being changed; this is used when |
2516 | invalidating call clobbered registers across a call. */ |
2517 | |
2518 | static void |
2519 | cselib_invalidate_regno (unsigned int regno, machine_mode mode) |
2520 | { |
2521 | unsigned int endregno; |
2522 | unsigned int i; |
2523 | |
2524 | /* If we see pseudos after reload, something is _wrong_. */ |
2525 | gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER((void)(!(!reload_completed || regno < 76 || reg_renumber[ regno] < 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2526, __FUNCTION__), 0 : 0)) |
2526 | || reg_renumber[regno] < 0)((void)(!(!reload_completed || regno < 76 || reg_renumber[ regno] < 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2526, __FUNCTION__), 0 : 0)); |
2527 | |
2528 | /* Determine the range of registers that must be invalidated. For |
2529 | pseudos, only REGNO is affected. For hard regs, we must take MODE |
2530 | into account, and we must also invalidate lower register numbers |
2531 | if they contain values that overlap REGNO. */ |
2532 | if (regno < FIRST_PSEUDO_REGISTER76) |
2533 | { |
2534 | gcc_assert (mode != VOIDmode)((void)(!(mode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2534, __FUNCTION__), 0 : 0)); |
2535 | |
2536 | if (regno < max_value_regs) |
2537 | i = 0; |
2538 | else |
2539 | i = regno - max_value_regs; |
2540 | |
2541 | endregno = end_hard_regno (mode, regno); |
2542 | } |
2543 | else |
2544 | { |
2545 | i = regno; |
2546 | endregno = regno + 1; |
2547 | } |
2548 | |
2549 | for (; i < endregno; i++) |
2550 | { |
2551 | struct elt_list **l = ®_VALUES (i)reg_values[i]; |
2552 | |
2553 | /* Go through all known values for this reg; if it overlaps the range |
2554 | we're invalidating, remove the value. */ |
2555 | while (*l) |
2556 | { |
2557 | cselib_val *v = (*l)->elt; |
2558 | unsigned int this_last = i; |
2559 | |
2560 | if (i < FIRST_PSEUDO_REGISTER76 && v != NULLnullptr) |
2561 | this_last = end_hard_regno (GET_MODE (v->val_rtx)((machine_mode) (v->val_rtx)->mode), i) - 1; |
2562 | |
2563 | if (this_last < regno || v == NULLnullptr |
2564 | || (v == cfa_base_preserved_val |
2565 | && i == cfa_base_preserved_regno)) |
2566 | { |
2567 | l = &(*l)->next; |
2568 | continue; |
2569 | } |
2570 | |
2571 | /* We have an overlap. */ |
2572 | cselib_invalidate_regno_val (i, l); |
2573 | } |
2574 | } |
2575 | } |
2576 | |
2577 | /* Invalidate any locations in the table which are changed because of a |
2578 | store to MEM_RTX. If this is called because of a non-const call |
2579 | instruction, MEM_RTX is (mem:BLK const0_rtx). */ |
2580 | |
2581 | static void |
2582 | cselib_invalidate_mem (rtx mem_rtx) |
2583 | { |
2584 | cselib_val **vp, *v, *next; |
2585 | int num_mems = 0; |
2586 | rtx mem_addr; |
2587 | |
2588 | mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)(((mem_rtx)->u.fld[0]).rt_rtx))); |
2589 | mem_rtx = canon_rtx (mem_rtx); |
2590 | |
2591 | vp = &first_containing_mem; |
2592 | for (v = *vp; v != &dummy_val; v = next) |
2593 | { |
2594 | bool has_mem = false; |
2595 | struct elt_loc_list **p = &v->locs; |
2596 | bool had_locs = v->locs != NULLnullptr; |
2597 | rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULLnullptr; |
2598 | |
2599 | while (*p) |
2600 | { |
2601 | rtx x = (*p)->loc; |
2602 | cselib_val *addr; |
2603 | struct elt_list **mem_chain; |
2604 | |
2605 | /* MEMs may occur in locations only at the top level; below |
2606 | that every MEM or REG is substituted by its VALUE. */ |
2607 | if (!MEM_P (x)(((enum rtx_code) (x)->code) == MEM)) |
2608 | { |
2609 | p = &(*p)->next; |
2610 | continue; |
2611 | } |
2612 | if (num_mems < param_max_cselib_memory_locationsglobal_options.x_param_max_cselib_memory_locations |
2613 | && ! canon_anti_dependence (x, false, mem_rtx, |
2614 | GET_MODE (mem_rtx)((machine_mode) (mem_rtx)->mode), mem_addr)) |
2615 | { |
2616 | has_mem = true; |
2617 | num_mems++; |
2618 | p = &(*p)->next; |
2619 | continue; |
2620 | } |
2621 | |
2622 | /* This one overlaps. */ |
2623 | /* We must have a mapping from this MEM's address to the |
2624 | value (E). Remove that, too. */ |
2625 | addr = cselib_lookup (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), VOIDmode((void) 0, E_VOIDmode), 0, GET_MODE (x)((machine_mode) (x)->mode)); |
2626 | addr = canonical_cselib_val (addr); |
2627 | gcc_checking_assert (v == canonical_cselib_val (v))((void)(!(v == canonical_cselib_val (v)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2627, __FUNCTION__), 0 : 0)); |
2628 | mem_chain = &addr->addr_list; |
2629 | for (;;) |
2630 | { |
2631 | cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt); |
2632 | |
2633 | if (canon == v) |
2634 | { |
2635 | unchain_one_elt_list (mem_chain); |
2636 | break; |
2637 | } |
2638 | |
2639 | /* Record canonicalized elt. */ |
2640 | (*mem_chain)->elt = canon; |
2641 | |
2642 | mem_chain = &(*mem_chain)->next; |
2643 | } |
2644 | |
2645 | unchain_one_elt_loc_list (p); |
2646 | } |
2647 | |
2648 | if (had_locs && cselib_useless_value_p (v)) |
2649 | { |
2650 | if (setting_insn && DEBUG_INSN_P (setting_insn)(((enum rtx_code) (setting_insn)->code) == DEBUG_INSN)) |
2651 | n_useless_debug_values++; |
2652 | else |
2653 | n_useless_values++; |
2654 | } |
2655 | |
2656 | next = v->next_containing_mem; |
2657 | if (has_mem) |
2658 | { |
2659 | *vp = v; |
2660 | vp = &(*vp)->next_containing_mem; |
2661 | } |
2662 | else |
2663 | v->next_containing_mem = NULLnullptr; |
2664 | } |
2665 | *vp = &dummy_val; |
2666 | } |
2667 | |
2668 | /* Invalidate DEST. */ |
2669 | |
2670 | void |
2671 | cselib_invalidate_rtx (rtx dest) |
2672 | { |
2673 | while (GET_CODE (dest)((enum rtx_code) (dest)->code) == SUBREG |
2674 | || GET_CODE (dest)((enum rtx_code) (dest)->code) == ZERO_EXTRACT |
2675 | || GET_CODE (dest)((enum rtx_code) (dest)->code) == STRICT_LOW_PART) |
2676 | dest = XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx); |
2677 | |
2678 | if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG)) |
2679 | cselib_invalidate_regno (REGNO (dest)(rhs_regno(dest)), GET_MODE (dest)((machine_mode) (dest)->mode)); |
2680 | else if (MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM)) |
2681 | cselib_invalidate_mem (dest); |
2682 | } |
2683 | |
2684 | /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */ |
2685 | |
2686 | static void |
2687 | cselib_invalidate_rtx_note_stores (rtx dest, const_rtx, |
2688 | void *data ATTRIBUTE_UNUSED__attribute__ ((__unused__))) |
2689 | { |
2690 | cselib_invalidate_rtx (dest); |
2691 | } |
2692 | |
2693 | /* Record the result of a SET instruction. DEST is being set; the source |
2694 | contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT |
2695 | describes its address. */ |
2696 | |
2697 | static void |
2698 | cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt) |
2699 | { |
2700 | if (src_elt == 0 || side_effects_p (dest)) |
2701 | return; |
2702 | |
2703 | if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG)) |
2704 | { |
2705 | unsigned int dreg = REGNO (dest)(rhs_regno(dest)); |
2706 | if (dreg < FIRST_PSEUDO_REGISTER76) |
2707 | { |
2708 | unsigned int n = REG_NREGS (dest)((&(dest)->u.reg)->nregs); |
2709 | |
2710 | if (n > max_value_regs) |
2711 | max_value_regs = n; |
2712 | } |
2713 | |
2714 | if (REG_VALUES (dreg)reg_values[dreg] == 0) |
2715 | { |
2716 | used_regs[n_used_regs++] = dreg; |
2717 | REG_VALUES (dreg)reg_values[dreg] = new_elt_list (REG_VALUES (dreg)reg_values[dreg], src_elt); |
2718 | } |
2719 | else |
2720 | { |
2721 | /* The register should have been invalidated. */ |
2722 | gcc_assert (REG_VALUES (dreg)->elt == 0)((void)(!(reg_values[dreg]->elt == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2722, __FUNCTION__), 0 : 0)); |
2723 | REG_VALUES (dreg)reg_values[dreg]->elt = src_elt; |
2724 | } |
2725 | |
2726 | if (cselib_useless_value_p (src_elt)) |
2727 | n_useless_values--; |
2728 | new_elt_loc_list (src_elt, dest); |
2729 | } |
2730 | else if (MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM) && dest_addr_elt != 0 |
2731 | && cselib_record_memory) |
2732 | { |
2733 | if (cselib_useless_value_p (src_elt)) |
2734 | n_useless_values--; |
2735 | add_mem_for_addr (dest_addr_elt, src_elt, dest); |
2736 | } |
2737 | } |
2738 | |
2739 | /* Make ELT and X's VALUE equivalent to each other at INSN. */ |
2740 | |
2741 | void |
2742 | cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn) |
2743 | { |
2744 | cselib_val *nelt; |
2745 | rtx_insn *save_cselib_current_insn = cselib_current_insn; |
2746 | |
/* Preconditions: ELT must exist, must already be preserved, and X must be
   side-effect free (an equivalence with side effects would be meaningless). */
2747 | gcc_checking_assert (elt)((void)(!(elt) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2747, __FUNCTION__), 0 : 0)); |
2748 | gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx))((void)(!((__extension__ ({ __typeof ((elt->val_rtx)) const _rtx = ((elt->val_rtx)); if (((enum rtx_code) (_rtx)-> code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2748, __FUNCTION__); _rtx; })->unchanging)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2748, __FUNCTION__), 0 : 0)); |
2749 | gcc_checking_assert (!side_effects_p (x))((void)(!(!side_effects_p (x)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2749, __FUNCTION__), 0 : 0)); |
2750 | |
/* Temporarily make INSN the current insn so the lookup below records its
   locations against INSN; restored at the end of the function.  */
2751 | cselib_current_insn = insn; |
2752 | |
2753 | nelt = cselib_lookup (x, GET_MODE (elt->val_rtx)((machine_mode) (elt->val_rtx)->mode), 1, VOIDmode((void) 0, E_VOIDmode)); |
2754 | |
/* If X already maps to ELT itself there is nothing new to record.  */
2755 | if (nelt != elt) |
2756 | { |
2757 | cselib_any_perm_equivs = true; |
2758 | |
2759 | if (!PRESERVED_VALUE_P (nelt->val_rtx)(__extension__ ({ __typeof ((nelt->val_rtx)) const _rtx = ( (nelt->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2759, __FUNCTION__); _rtx; })->unchanging)) |
2760 | cselib_preserve_value (nelt); |
2761 | |
2762 | new_elt_loc_list (nelt, elt->val_rtx); |
2763 | } |
2764 | |
2765 | cselib_current_insn = save_cselib_current_insn; |
2766 | } |
2767 | |
2768 | /* Return TRUE if any permanent equivalences have been recorded since |
2769 | the table was last initialized. */ |
/* The flag is set by cselib_add_permanent_equiv and cleared in
   cselib_init / cselib_finish.  */
2770 | bool |
2771 | cselib_have_permanent_equivalences (void) |
2772 | { |
2773 | return cselib_any_perm_equivs; |
2774 | } |
2775 | |
2776 | /* Record stack_pointer_rtx to be equal to |
2777 | (plus:P cfa_base_preserved_val offset). Used by var-tracking |
2778 | at the start of basic blocks for !frame_pointer_needed functions. */ |
2779 | |
2780 | void |
2781 | cselib_record_sp_cfa_base_equiv (HOST_WIDE_INTlong offset, rtx_insn *insn) |
2782 | { |
2783 | rtx sp_derived_value = NULL_RTX(rtx) 0; |
/* Scan the locations of cfa_base_preserved_val for either an
   SP_DERIVED_VALUE_P VALUE directly, or (plus VALUE const_int); in the
   latter case fold the constant into OFFSET.  */
2784 | for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next) |
2785 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == VALUE |
2786 | && SP_DERIVED_VALUE_P (l->loc)(__extension__ ({ __typeof ((l->loc)) const _rtx = ((l-> loc)); if (((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2786, __FUNCTION__); _rtx; })->call)) |
2787 | { |
2788 | sp_derived_value = l->loc; |
2789 | break; |
2790 | } |
2791 | else if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
2792 | && GET_CODE (XEXP (l->loc, 0))((enum rtx_code) ((((l->loc)->u.fld[0]).rt_rtx))->code ) == VALUE |
2793 | && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))(__extension__ ({ __typeof (((((l->loc)->u.fld[0]).rt_rtx ))) const _rtx = (((((l->loc)->u.fld[0]).rt_rtx))); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2793, __FUNCTION__); _rtx; })->call) |
2794 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
2795 | { |
2796 | sp_derived_value = XEXP (l->loc, 0)(((l->loc)->u.fld[0]).rt_rtx); |
2797 | offset = offset + UINTVAL (XEXP (l->loc, 1))((unsigned long) (((((l->loc)->u.fld[1]).rt_rtx))->u .hwint[0])); |
2798 | break; |
2799 | } |
/* No usable SP-derived location found: nothing to record.  */
2800 | if (sp_derived_value == NULL_RTX(rtx) 0) |
2801 | return; |
2802 | cselib_val *val |
2803 | = cselib_lookup_from_insn (plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), sp_derived_value, offset), |
2804 | Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), 1, VOIDmode((void) 0, E_VOIDmode), insn); |
2805 | if (val != NULLnullptr) |
2806 | { |
/* Preserve the value and record sp = VAL for this insn.  */
2807 | PRESERVED_VALUE_P (val->val_rtx)(__extension__ ({ __typeof ((val->val_rtx)) const _rtx = ( (val->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2807, __FUNCTION__); _rtx; })->unchanging) = 1; |
2808 | cselib_record_set (stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER]), val, NULLnullptr); |
2809 | } |
2810 | } |
2811 | |
2812 | /* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT) |
2813 | that can be expressed using cfa_base_preserved_val + CONST_INT. */ |
2814 | |
2815 | bool |
2816 | cselib_sp_derived_value_p (cselib_val *v) |
2817 | { |
/* If V itself is not SP-derived, look through a single
   (plus SP_DERIVED_VALUE const_int) location and continue with that
   inner VALUE instead.  */
2818 | if (!SP_DERIVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2818, __FUNCTION__); _rtx; })->call)) |
2819 | for (struct elt_loc_list *l = v->locs; l; l = l->next) |
2820 | if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
2821 | && GET_CODE (XEXP (l->loc, 0))((enum rtx_code) ((((l->loc)->u.fld[0]).rt_rtx))->code ) == VALUE |
2822 | && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))(__extension__ ({ __typeof (((((l->loc)->u.fld[0]).rt_rtx ))) const _rtx = (((((l->loc)->u.fld[0]).rt_rtx))); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2822, __FUNCTION__); _rtx; })->call) |
2823 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
2824 | v = CSELIB_VAL_PTR (XEXP (l->loc, 0))((((((l->loc)->u.fld[0]).rt_rtx))->u.fld[0]).rt_cselib ); |
2825 | if (!SP_DERIVED_VALUE_P (v->val_rtx)(__extension__ ({ __typeof ((v->val_rtx)) const _rtx = ((v ->val_rtx)); if (((enum rtx_code) (_rtx)->code) != VALUE ) rtl_check_failed_flag ("SP_DERIVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2825, __FUNCTION__); _rtx; })->call)) |
2826 | return false; |
/* Accept V iff one of its locations is cfa_base_preserved_val directly,
   or cfa_base_preserved_val plus a constant.  */
2827 | for (struct elt_loc_list *l = v->locs; l; l = l->next) |
2828 | if (l->loc == cfa_base_preserved_val->val_rtx) |
2829 | return true; |
2830 | else if (GET_CODE (l->loc)((enum rtx_code) (l->loc)->code) == PLUS |
2831 | && XEXP (l->loc, 0)(((l->loc)->u.fld[0]).rt_rtx) == cfa_base_preserved_val->val_rtx |
2832 | && CONST_INT_P (XEXP (l->loc, 1))(((enum rtx_code) ((((l->loc)->u.fld[1]).rt_rtx))->code ) == CONST_INT)) |
2833 | return true; |
2834 | return false; |
2835 | } |
2836 | |
2837 | /* There is no good way to determine how many elements there can be |
2838 | in a PARALLEL. Since it's fairly cheap, use a really large number. */ |
2839 | #define MAX_SETS(76 * 2) (FIRST_PSEUDO_REGISTER76 * 2) |
2840 | |
/* Argument bundle passed through for_each_inc_dec to
   cselib_record_autoinc_cb.  */
2841 | struct cselib_record_autoinc_data |
2842 | { |
/* Points into the caller's (cselib_record_sets') local sets[] buffer.  */
2843 | struct cselib_set *sets; |
/* Number of entries currently filled in SETS; the callback appends.  */
2844 | int n_sets; |
2845 | }; |
2846 | |
2847 | /* Callback for for_each_inc_dec. Records in ARG the SETs implied by |
2848 | autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */ |
2849 | |
2850 | static int |
2851 | cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED__attribute__ ((__unused__)), rtx op ATTRIBUTE_UNUSED__attribute__ ((__unused__)), |
2852 | rtx dest, rtx src, rtx srcoff, void *arg) |
2853 | { |
2854 | struct cselib_record_autoinc_data *data; |
2855 | data = (struct cselib_record_autoinc_data *)arg; |
2856 | |
2857 | data->sets[data->n_sets].dest = dest; |
2858 | |
/* An autoinc with an offset stores SRC + SRCOFF into DEST; a plain
   pre/post modify without one stores SRC itself.  */
2859 | if (srcoff) |
2860 | data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff)gen_rtx_fmt_ee_stat ((PLUS), ((((machine_mode) (src)->mode ))), ((src)), ((srcoff)) ); |
2861 | else |
2862 | data->sets[data->n_sets].src = src; |
2863 | |
2864 | data->n_sets++; |
2865 | |
/* Zero tells for_each_inc_dec to keep iterating.  */
2866 | return 0; |
2867 | } |
2868 | |
2869 | /* Record the effects of any sets and autoincs in INSN. */ |
2870 | static void |
2871 | cselib_record_sets (rtx_insn *insn) |
2872 | { |
2873 | int n_sets = 0; |
2874 | int i; |
2875 | struct cselib_set sets[MAX_SETS(76 * 2)]; |
2876 | rtx cond = 0; |
2877 | int n_sets_before_autoinc; |
2878 | int n_strict_low_parts = 0; |
2879 | struct cselib_record_autoinc_data data; |
2880 | |
/* For a COND_EXEC pattern, remember the condition and work on the
   guarded body; COND is folded into each source as IF_THEN_ELSE below.  */
2881 | rtx body = PATTERN (insn); |
2882 | if (GET_CODE (body)((enum rtx_code) (body)->code) == COND_EXEC) |
2883 | { |
2884 | cond = COND_EXEC_TEST (body)(((body)->u.fld[0]).rt_rtx); |
2885 | body = COND_EXEC_CODE (body)(((body)->u.fld[1]).rt_rtx); |
2886 | } |
2887 | |
2888 | /* Find all sets. */ |
2889 | if (GET_CODE (body)((enum rtx_code) (body)->code) == SET) |
2890 | { |
2891 | sets[0].src = SET_SRC (body)(((body)->u.fld[1]).rt_rtx); |
2892 | sets[0].dest = SET_DEST (body)(((body)->u.fld[0]).rt_rtx); |
2893 | n_sets = 1; |
2894 | } |
2895 | else if (GET_CODE (body)((enum rtx_code) (body)->code) == PARALLEL) |
2896 | { |
2897 | /* Look through the PARALLEL and record the values being |
2898 | set, if possible. Also handle any CLOBBERs. */ |
2899 | for (i = XVECLEN (body, 0)(((((body)->u.fld[0]).rt_rtvec))->num_elem) - 1; i >= 0; --i) |
2900 | { |
2901 | rtx x = XVECEXP (body, 0, i)(((((body)->u.fld[0]).rt_rtvec))->elem[i]); |
2902 | |
2903 | if (GET_CODE (x)((enum rtx_code) (x)->code) == SET) |
2904 | { |
2905 | sets[n_sets].src = SET_SRC (x)(((x)->u.fld[1]).rt_rtx); |
2906 | sets[n_sets].dest = SET_DEST (x)(((x)->u.fld[0]).rt_rtx); |
2907 | n_sets++; |
2908 | } |
2909 | } |
2910 | } |
2911 | |
/* When memory isn't being recorded, a load from read-only memory can
   still be usefully replaced by a constant REG_EQUAL/REG_EQUIV note.  */
2912 | if (n_sets == 1 |
2913 | && MEM_P (sets[0].src)(((enum rtx_code) (sets[0].src)->code) == MEM) |
2914 | && !cselib_record_memory |
2915 | && MEM_READONLY_P (sets[0].src)(__extension__ ({ __typeof ((sets[0].src)) const _rtx = ((sets [0].src)); if (((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag ("MEM_READONLY_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 2915, __FUNCTION__); _rtx; })->unchanging)) |
2916 | { |
2917 | rtx note = find_reg_equal_equiv_note (insn); |
2918 | |
2919 | if (note && CONSTANT_P (XEXP (note, 0))((rtx_class[(int) (((enum rtx_code) ((((note)->u.fld[0]).rt_rtx ))->code))]) == RTX_CONST_OBJ)) |
2920 | sets[0].src = XEXP (note, 0)(((note)->u.fld[0]).rt_rtx); |
2921 | } |
2922 | |
/* Append the implicit sets of any autoinc addressing modes; remember
   where they start so their dests can be invalidated separately below.  */
2923 | data.sets = sets; |
2924 | data.n_sets = n_sets_before_autoinc = n_sets; |
2925 | for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data); |
2926 | n_sets = data.n_sets; |
2927 | |
2928 | /* Look up the values that are read. Do this before invalidating the |
2929 | locations that are written. */ |
2930 | for (i = 0; i < n_sets; i++) |
2931 | { |
2932 | rtx dest = sets[i].dest; |
2933 | rtx orig = dest; |
2934 | |
2935 | /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for |
2936 | the low part after invalidating any knowledge about larger modes. */ |
2937 | if (GET_CODE (sets[i].dest)((enum rtx_code) (sets[i].dest)->code) == STRICT_LOW_PART) |
2938 | sets[i].dest = dest = XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx); |
2939 | |
2940 | /* We don't know how to record anything but REG or MEM. */ |
2941 | if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG) |
2942 | || (MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM) && cselib_record_memory)) |
2943 | { |
2944 | rtx src = sets[i].src; |
2945 | if (cond) |
2946 | src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest)gen_rtx_fmt_eee_stat ((IF_THEN_ELSE), ((((machine_mode) (dest )->mode))), ((cond)), ((src)), ((dest)) ); |
2947 | sets[i].src_elt = cselib_lookup (src, GET_MODE (dest)((machine_mode) (dest)->mode), 1, VOIDmode((void) 0, E_VOIDmode)); |
2948 | if (MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM)) |
2949 | { |
2950 | machine_mode address_mode = get_address_mode (dest); |
2951 | |
2952 | sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx), |
2953 | address_mode, 1, |
2954 | GET_MODE (dest)((machine_mode) (dest)->mode)); |
2955 | } |
2956 | else |
2957 | sets[i].dest_addr_elt = 0; |
2958 | } |
2959 | |
2960 | /* Improve handling of STRICT_LOW_PART if the current value is known |
2961 | to be const0_rtx, then the low bits will be set to dest and higher |
2962 | bits will remain zero. Used in code like: |
2963 | |
2964 | {di:SI=0;clobber flags:CC;} |
2965 | flags:CCNO=cmp(bx:SI,0) |
2966 | strict_low_part(di:QI)=flags:CCNO<=0 |
2967 | |
2968 | where we can note both that di:QI=flags:CCNO<=0 and |
2969 | also that because di:SI is known to be 0 and strict_low_part(di:QI) |
2970 | preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0). */ |
2971 | scalar_int_mode mode; |
2972 | if (dest != orig |
2973 | && cselib_record_sets_hook |
2974 | && REG_P (dest)(((enum rtx_code) (dest)->code) == REG) |
2975 | && HARD_REGISTER_P (dest)((((rhs_regno(dest))) < 76)) |
2976 | && sets[i].src_elt |
2977 | && is_a <scalar_int_mode> (GET_MODE (dest)((machine_mode) (dest)->mode), &mode) |
2978 | && n_sets + n_strict_low_parts < MAX_SETS(76 * 2)) |
2979 | { |
/* Find the widest word-sized superreg known to be zero; if one is
   found, append a synthetic set of it after the real sets.  */
2980 | opt_scalar_int_mode wider_mode_iter; |
2981 | FOR_EACH_WIDER_MODE (wider_mode_iter, mode)for ((wider_mode_iter) = (mode), mode_iterator::get_wider (& (wider_mode_iter)); mode_iterator::iterate_p (&(wider_mode_iter )); mode_iterator::get_wider (&(wider_mode_iter))) |
2982 | { |
2983 | scalar_int_mode wider_mode = wider_mode_iter.require (); |
2984 | if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))) |
2985 | break; |
2986 | |
2987 | rtx reg = gen_lowpartrtl_hooks.gen_lowpart (wider_mode, dest); |
2988 | if (!REG_P (reg)(((enum rtx_code) (reg)->code) == REG)) |
2989 | break; |
2990 | |
2991 | cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode((void) 0, E_VOIDmode)); |
2992 | if (!v) |
2993 | continue; |
2994 | |
2995 | struct elt_loc_list *l; |
2996 | for (l = v->locs; l; l = l->next) |
2997 | if (l->loc == const0_rtx(const_int_rtx[64])) |
2998 | break; |
2999 | |
/* The wider reg is not known to be zero; try an even wider mode.  */
3000 | if (!l) |
3001 | continue; |
3002 | |
3003 | sets[n_sets + n_strict_low_parts].dest = reg; |
3004 | sets[n_sets + n_strict_low_parts].src = dest; |
3005 | sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt; |
3006 | break; |
3007 | } |
3008 | } |
3009 | } |
3010 | |
3011 | if (cselib_record_sets_hook) |
3012 | cselib_record_sets_hook (insn, sets, n_sets); |
3013 | |
3014 | /* Invalidate all locations written by this insn. Note that the elts we |
3015 | looked up in the previous loop aren't affected, just some of their |
3016 | locations may go away. */ |
3017 | note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULLnullptr); |
3018 | |
/* note_pattern_stores doesn't see the autoinc side effects, so their
   destinations are invalidated explicitly here.  */
3019 | for (i = n_sets_before_autoinc; i < n_sets; i++) |
3020 | cselib_invalidate_rtx (sets[i].dest); |
3021 | |
3022 | /* If this is an asm, look for duplicate sets. This can happen when the |
3023 | user uses the same value as an output multiple times. This is valid |
3024 | if the outputs are not actually used thereafter. Treat this case as |
3025 | if the value isn't actually set. We do this by smashing the destination |
3026 | to pc_rtx, so that we won't record the value later. */ |
3027 | if (n_sets >= 2 && asm_noperands (body) >= 0) |
3028 | { |
3029 | for (i = 0; i < n_sets; i++) |
3030 | { |
3031 | rtx dest = sets[i].dest; |
3032 | if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG) || MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM)) |
3033 | { |
3034 | int j; |
3035 | for (j = i + 1; j < n_sets; j++) |
3036 | if (rtx_equal_p (dest, sets[j].dest)) |
3037 | { |
3038 | sets[i].dest = pc_rtx; |
3039 | sets[j].dest = pc_rtx; |
3040 | } |
3041 | } |
3042 | } |
3043 | } |
3044 | |
3045 | /* Now enter the equivalences in our tables. */ |
3046 | for (i = 0; i < n_sets; i++) |
3047 | { |
3048 | rtx dest = sets[i].dest; |
3049 | if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG) |
3050 | || (MEM_P (dest)(((enum rtx_code) (dest)->code) == MEM) && cselib_record_memory)) |
3051 | cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt); |
3052 | } |
3053 | |
3054 | /* And deal with STRICT_LOW_PART. */ |
/* Record the zero_extend equivalences synthesized above as permanent
   equivalences, but only for preserved source values.  */
3055 | for (i = 0; i < n_strict_low_parts; i++) |
3056 | { |
3057 | if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx)(__extension__ ({ __typeof ((sets[n_sets + i].src_elt->val_rtx )) const _rtx = ((sets[n_sets + i].src_elt->val_rtx)); if ( ((enum rtx_code) (_rtx)->code) != VALUE) rtl_check_failed_flag ("PRESERVED_VALUE_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 3057, __FUNCTION__); _rtx; })->unchanging)) |
3058 | continue; |
3059 | machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest)((machine_mode) (sets[n_sets + i].dest)->mode); |
3060 | cselib_val *v |
3061 | = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode((void) 0, E_VOIDmode)); |
3062 | cselib_preserve_value (v); |
3063 | rtx r = gen_rtx_ZERO_EXTEND (dest_mode,gen_rtx_fmt_e_stat ((ZERO_EXTEND), ((dest_mode)), ((sets[n_sets + i].src_elt->val_rtx)) ) |
3064 | sets[n_sets + i].src_elt->val_rtx)gen_rtx_fmt_e_stat ((ZERO_EXTEND), ((dest_mode)), ((sets[n_sets + i].src_elt->val_rtx)) ); |
3065 | cselib_add_permanent_equiv (v, r, insn); |
3066 | } |
3067 | } |
3068 | |
3069 | /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */ |
3070 | |
3071 | bool |
3072 | fp_setter_insn (rtx_insn *insn) |
3073 | { |
3074 | rtx expr, pat = NULL_RTX(rtx) 0; |
3075 | |
3076 | if (!RTX_FRAME_RELATED_P (insn)(__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if (((enum rtx_code) (_rtx)->code) != DEBUG_INSN && ( (enum rtx_code) (_rtx)->code) != INSN && ((enum rtx_code ) (_rtx)->code) != CALL_INSN && ((enum rtx_code) ( _rtx)->code) != JUMP_INSN && ((enum rtx_code) (_rtx )->code) != BARRIER && ((enum rtx_code) (_rtx)-> code) != SET) rtl_check_failed_flag ("RTX_FRAME_RELATED_P",_rtx , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 3076, __FUNCTION__); _rtx; })->frame_related)) |
3077 | return false; |
3078 | |
/* If a REG_FRAME_RELATED_EXPR note is present, that expression, not the
   insn pattern, describes the CFI effect; check modification against it.  */
3079 | expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX(rtx) 0); |
3080 | if (expr) |
3081 | pat = XEXP (expr, 0)(((expr)->u.fld[0]).rt_rtx); |
3082 | if (!modified_in_p (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]), pat ? pat : insn)) |
3083 | return false; |
3084 | |
3085 | /* Don't return true for frame pointer restores in the epilogue. */ |
3086 | if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]))) |
3087 | return false; |
3088 | return true; |
3089 | } |
3090 | |
3091 | /* V is one of the values in REG_VALUES (REGNO). Return true if it |
3092 | would be invalidated by CALLEE_ABI. */ |
3093 | |
3094 | static bool |
3095 | cselib_invalidated_by_call_p (const function_abi &callee_abi, |
3096 | unsigned int regno, cselib_val *v) |
3097 | { |
3098 | machine_mode mode = GET_MODE (v->val_rtx)((machine_mode) (v->val_rtx)->mode); |
/* VOIDmode means V is a mode-independent constant; fall back to the mode
   of the first value recorded for REGNO to decide clobbering.  */
3099 | if (mode == VOIDmode((void) 0, E_VOIDmode)) |
3100 | { |
3101 | v = REG_VALUES (regno)reg_values[regno]->elt; |
3102 | if (!v) |
3103 | /* If we don't know what the mode of the constant value is, and we |
3104 | don't know what mode the register was set in, conservatively |
3105 | assume that the register is clobbered. The value's going to be |
3106 | essentially useless in this case anyway. */ |
3107 | return true; |
3108 | mode = GET_MODE (v->val_rtx)((machine_mode) (v->val_rtx)->mode); |
3109 | } |
3110 | return callee_abi.clobbers_reg_p (mode, regno); |
3111 | } |
3112 | |
3113 | /* Record the effects of INSN. */ |
3114 | |
3115 | void |
3116 | cselib_process_insn (rtx_insn *insn) |
3117 | { |
3118 | int i; |
3119 | rtx x; |
3120 | |
3121 | cselib_current_insn = insn; |
3122 | |
3123 | /* Forget everything at a CODE_LABEL or a setjmp. */ |
3124 | if ((LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL) |
3125 | || (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN) |
3126 | && find_reg_note (insn, REG_SETJMP, NULLnullptr))) |
3127 | && !cselib_preserve_constants) |
3128 | { |
3129 | cselib_reset_table (next_uid); |
3130 | cselib_current_insn = NULLnullptr; |
3131 | return; |
3132 | } |
3133 | |
/* Non-insns (notes, barriers, ...) have no effects to record.  */
3134 | if (! INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code ) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)-> code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) == DEBUG_INSN))) |
3135 | { |
3136 | cselib_current_insn = NULLnullptr; |
3137 | return; |
3138 | } |
3139 | |
3140 | /* If this is a call instruction, forget anything stored in a |
3141 | call clobbered register, or, if this is not a const call, in |
3142 | memory. */ |
3143 | if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN)) |
3144 | { |
3145 | function_abi callee_abi = insn_callee_abi (insn); |
3146 | for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) |
3147 | { |
/* Walk the value chain with a pointer-to-pointer so invalidation can
   unlink the current node; only advance L when nothing was removed.  */
3148 | elt_list **l = &REG_VALUES (i)reg_values[i]; |
3149 | while (*l) |
3150 | { |
3151 | cselib_val *v = (*l)->elt; |
3152 | if (v && cselib_invalidated_by_call_p (callee_abi, i, v)) |
3153 | cselib_invalidate_regno_val (i, l); |
3154 | else |
3155 | l = &(*l)->next; |
3156 | } |
3157 | } |
3158 | |
3159 | /* Since it is not clear how cselib is going to be used, be |
3160 | conservative here and treat looping pure or const functions |
3161 | as if they were regular functions. */ |
3162 | if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)(__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if (((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag ("CONST_OR_PURE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 3162, __FUNCTION__); _rtx; })->call) |
3163 | || !(RTL_CONST_OR_PURE_CALL_P (insn)((__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if (((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag ("RTL_CONST_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 3163, __FUNCTION__); _rtx; })->unchanging) || (__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if (((enum rtx_code ) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag ("RTL_PURE_CALL_P" , _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cselib.cc" , 3163, __FUNCTION__); _rtx; })->return_val)))) |
3164 | cselib_invalidate_mem (callmem); |
3165 | else |
3166 | /* For const/pure calls, invalidate any argument slots because |
3167 | they are owned by the callee. */ |
3168 | for (x = CALL_INSN_FUNCTION_USAGE (insn)(((insn)->u.fld[7]).rt_rtx); x; x = XEXP (x, 1)(((x)->u.fld[1]).rt_rtx)) |
3169 | if (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == USE |
3170 | && MEM_P (XEXP (XEXP (x, 0), 0))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[ 0]).rt_rtx))->code) == MEM)) |
3171 | cselib_invalidate_mem (XEXP (XEXP (x, 0), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx)); |
3172 | } |
3173 | |
3174 | cselib_record_sets (insn); |
3175 | |
3176 | /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only |
3177 | after we have processed the insn. */ |
3178 | if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN)) |
3179 | { |
3180 | for (x = CALL_INSN_FUNCTION_USAGE (insn)(((insn)->u.fld[7]).rt_rtx); x; x = XEXP (x, 1)(((x)->u.fld[1]).rt_rtx)) |
3181 | if (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == CLOBBER) |
3182 | cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx)); |
3183 | |
3184 | /* Flush everything on setjmp. */ |
/* In the preserve-constants case the early setjmp flush above was
   skipped; do it here after keeping only preserved values.  */
3185 | if (cselib_preserve_constants |
3186 | && find_reg_note (insn, REG_SETJMP, NULLnullptr)) |
3187 | { |
3188 | cselib_preserve_only_values (); |
3189 | cselib_reset_table (next_uid); |
3190 | } |
3191 | } |
3192 | |
3193 | /* On setter of the hard frame pointer if frame_pointer_needed, |
3194 | invalidate stack_pointer_rtx, so that sp and {,h}fp based |
3195 | VALUEs are distinct. */ |
3196 | if (reload_completed |
3197 | && frame_pointer_needed((&x_rtl)->frame_pointer_needed) |
3198 | && fp_setter_insn (insn)) |
3199 | cselib_invalidate_rtx (stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER])); |
3200 | |
3201 | cselib_current_insn = NULLnullptr; |
3202 | |
3203 | if (n_useless_values > MAX_USELESS_VALUES32 |
3204 | /* remove_useless_values is linear in the hash table size. Avoid |
3205 | quadratic behavior for very large hashtables with very few |
3206 | useless elements. */ |
3207 | && ((unsigned int)n_useless_values |
3208 | > (cselib_hash_table->elements () - n_debug_values) / 4)) |
3209 | remove_useless_values (); |
3210 | } |
3211 | |
3212 | /* Initialize cselib for one pass. The caller must also call |
3213 | init_alias_analysis. */ |
3214 | |
3215 | void |
3216 | cselib_init (int record_what) |
3217 | { |
3218 | cselib_record_memory = record_what & CSELIB_RECORD_MEMORY; |
3219 | cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS; |
3220 | cselib_any_perm_equivs = false; |
3221 | |
3222 | /* (mem:BLK (scratch)) is a special mechanism to conflict with everything, |
3223 | see canon_true_dependence. This is only created once. */ |
3224 | if (! callmem) |
3225 | callmem = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), gen_rtx_SCRATCH (VOIDmode)gen_rtx_fmt__stat ((SCRATCH), ((((void) 0, E_VOIDmode))) )); |
3226 | |
3227 | cselib_nregs = max_reg_num (); |
3228 | |
3229 | /* We preserve reg_values to allow expensive clearing of the whole thing. |
3230 | Reallocate it however if it happens to be too large. */ |
3231 | if (!reg_values || reg_values_size < cselib_nregs |
3232 | || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4)) |
3233 | { |
3234 | free (reg_values); |
3235 | /* Some space for newly emit instructions so we don't end up |
3236 | reallocating in between passes. */ |
3237 | reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16; |
3238 | reg_values = XCNEWVEC (struct elt_list *, reg_values_size)((struct elt_list * *) xcalloc ((reg_values_size), sizeof (struct elt_list *))); |
3239 | } |
3240 | used_regs = XNEWVEC (unsigned int, cselib_nregs)((unsigned int *) xmalloc (sizeof (unsigned int) * (cselib_nregs ))); |
3241 | n_used_regs = 0; |
3242 | /* FIXME: enable sanitization (PR87845) */ |
3243 | cselib_hash_table |
3244 | = new hash_table<cselib_hasher> (31, /* ggc */ false, |
3245 | /* sanitize_eq_and_hash */ false); |
/* The preserved table only exists in the preserve-constants mode; its
   deletion in cselib_finish is guarded accordingly.  */
3246 | if (cselib_preserve_constants) |
3247 | cselib_preserved_hash_table |
3248 | = new hash_table<cselib_hasher> (31, /* ggc */ false, |
3249 | /* sanitize_eq_and_hash */ false); |
3250 | next_uid = 1; |
3251 | } |
3252 | |
3253 | /* Called when the current user is done with cselib. */ |
3254 | |
3255 | void |
3256 | cselib_finish (void) |
3257 | { |
/* Snapshot the flag before clearing: it decides below whether the
   preserved hash table was ever allocated (see cselib_init).  */
3258 | bool preserved = cselib_preserve_constants; |
3259 | cselib_discard_hook = NULLnullptr; |
3260 | cselib_preserve_constants = false; |
3261 | cselib_any_perm_equivs = false; |
3262 | cfa_base_preserved_val = NULLnullptr; |
3263 | cfa_base_preserved_regno = INVALID_REGNUM(~(unsigned int) 0); |
3264 | elt_list_pool.release (); |
3265 | elt_loc_list_pool.release (); |
3266 | cselib_val_pool.release (); |
3267 | value_pool.release (); |
3268 | cselib_clear_table (); |
3269 | delete cselib_hash_table; |
3270 | cselib_hash_table = NULLnullptr; |
3271 | if (preserved) |
3272 | delete cselib_preserved_hash_table; |
3273 | cselib_preserved_hash_table = NULLnullptr; |
3274 | free (used_regs); |
3275 | used_regs = 0; |
3276 | n_useless_values = 0; |
3277 | n_useless_debug_values = 0; |
3278 | n_debug_values = 0; |
3279 | next_uid = 0; |
3280 | } |
3281 | |
3282 | /* Dump the cselib_val *X to FILE *OUT. */ |
/* Signature matches hash_table::traverse; always returns 1 so the
   traversal continues over all entries.  */
3283 | |
3284 | int |
3285 | dump_cselib_val (cselib_val **x, FILE *out) |
3286 | { |
3287 | cselib_val *v = *x; |
/* NEED_LF tracks whether a newline is still owed before the next
   section or at the end of the entry.  */
3288 | bool need_lf = true; |
3289 | |
3290 | print_inline_rtx (out, v->val_rtx, 0); |
3291 | |
3292 | if (v->locs) |
3293 | { |
3294 | struct elt_loc_list *l = v->locs; |
3295 | if (need_lf) |
3296 | { |
3297 | fputc ('\n', out); |
3298 | need_lf = false; |
3299 | } |
3300 | fputs (" locs:", out); |
3301 | do |
3302 | { |
3303 | if (l->setting_insn) |
3304 | fprintf (out, "\n from insn %i ", |
3305 | INSN_UID (l->setting_insn)); |
3306 | else |
3307 | fprintf (out, "\n "); |
3308 | print_inline_rtx (out, l->loc, 4); |
3309 | } |
3310 | while ((l = l->next)); |
3311 | fputc ('\n', out); |
3312 | } |
3313 | else |
3314 | { |
3315 | fputs (" no locs", out); |
3316 | need_lf = true; |
3317 | } |
3318 | |
3319 | if (v->addr_list) |
3320 | { |
3321 | struct elt_list *e = v->addr_list; |
3322 | if (need_lf) |
3323 | { |
3324 | fputc ('\n', out); |
3325 | need_lf = false; |
3326 | } |
3327 | fputs (" addr list:", out); |
3328 | do |
3329 | { |
3330 | fputs ("\n ", out); |
3331 | print_inline_rtx (out, e->elt->val_rtx, 2); |
3332 | } |
3333 | while ((e = e->next)); |
3334 | fputc ('\n', out); |
3335 | } |
3336 | else |
3337 | { |
3338 | fputs (" no addrs", out); |
3339 | need_lf = true; |
3340 | } |
3341 | |
3342 | if (v->next_containing_mem == &dummy_val) |
3343 | fputs (" last mem\n", out); |
3344 | else if (v->next_containing_mem) |
3345 | { |
3346 | fputs (" next mem ", out); |
3347 | print_inline_rtx (out, v->next_containing_mem->val_rtx, 2); |
3348 | fputc ('\n', out); |
3349 | } |
3350 | else if (need_lf) |
3351 | fputc ('\n', out); |
3352 | |
3353 | return 1; |
3354 | } |
3355 | |
3356 | /* Dump to OUT everything in the CSELIB table. */ |
/* NOTE(review): cselib_preserved_hash_table is only allocated when
   cselib_init ran with CSELIB_PRESERVE_CONSTANTS — presumably callers
   only dump in that configuration; verify against callers.  */
3357 | |
3358 | void |
3359 | dump_cselib_table (FILE *out) |
3360 | { |
3361 | fprintf (out, "cselib hash table:\n"); |
3362 | cselib_hash_table->traverse <FILE *, dump_cselib_val> (out); |
3363 | fprintf (out, "cselib preserved hash table:\n"); |
3364 | cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out); |
3365 | if (first_containing_mem != &dummy_val) |
3366 | { |
3367 | fputs ("first mem ", out); |
3368 | print_inline_rtx (out, first_containing_mem->val_rtx, 2); |
3369 | fputc ('\n', out); |
3370 | } |
3371 | fprintf (out, "next uid %i\n", next_uid); |
3372 | } |
3373 | |
3374 | #include "gt-cselib.h" |