File: | build/gcc/tree-tailcall.cc |
Warning: | line 943, column 10: Although the value stored to 'orig_stmt' is used in the enclosing expression, the value is never actually read from 'orig_stmt' |
1 | /* Tail call optimization on trees. |
2 | Copyright (C) 2003-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify |
7 | it under the terms of the GNU General Public License as published by |
8 | the Free Software Foundation; either version 3, or (at your option) |
9 | any later version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
14 | GNU General Public License for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "rtl.h" |
25 | #include "tree.h" |
26 | #include "gimple.h" |
27 | #include "cfghooks.h" |
28 | #include "tree-pass.h" |
29 | #include "ssa.h" |
30 | #include "cgraph.h" |
31 | #include "gimple-pretty-print.h" |
32 | #include "fold-const.h" |
33 | #include "stor-layout.h" |
34 | #include "gimple-iterator.h" |
35 | #include "gimplify-me.h" |
36 | #include "tree-cfg.h" |
37 | #include "tree-into-ssa.h" |
38 | #include "tree-dfa.h" |
39 | #include "except.h" |
40 | #include "tree-eh.h" |
41 | #include "dbgcnt.h" |
42 | #include "cfgloop.h" |
43 | #include "common/common-target.h" |
44 | #include "ipa-utils.h" |
45 | #include "tree-ssa-live.h" |
46 | |
47 | /* This file implements tail recursion elimination. It is also used to |
48 | analyze the tail calls in general, passing the results to the rtl level |
49 | where they are used for sibcall optimization. |
50 | |
51 | In addition to the standard tail recursion elimination, we handle the most |
52 | trivial cases of making the call tail recursive by creating accumulators. |
53 | For example the following function |
54 | |
55 | int sum (int n) |
56 | { |
57 | if (n > 0) |
58 | return n + sum (n - 1); |
59 | else |
60 | return 0; |
61 | } |
62 | |
63 | is transformed into |
64 | |
65 | int sum (int n) |
66 | { |
67 | int acc = 0; |
68 | |
69 | while (n > 0) |
70 | acc += n--; |
71 | |
72 | return acc; |
73 | } |
74 | |
75 | To do this, we maintain two accumulators (a_acc and m_acc) that indicate |
76 | when we reach the return x statement, we should return a_acc + x * m_acc |
77 | instead. They are initialized to 0 and 1, respectively, |
78 | so the semantics of the function is obviously preserved. If we are |
79 | guaranteed that the value of an accumulator never changes, we |
80 | omit that accumulator. |
81 | |
82 | There are three ways in which the function may exit. The first one is |
83 | handled in adjust_return_value, the other two in adjust_accumulator_values |
84 | (the second case is actually a special case of the third one and we |
85 | present it separately just for clarity): |
86 | |
87 | 1) Just return x, where x is not in any of the remaining special shapes. |
88 | We rewrite this to a gimple equivalent of return m_acc * x + a_acc. |
89 | |
90 | 2) return f (...), where f is the current function, is rewritten in a |
91 | classical tail-recursion elimination way, into assignment of arguments |
92 | and jump to the start of the function. Values of the accumulators |
93 | are unchanged. |
94 | |
95 | 3) return a + m * f(...), where a and m do not depend on call to f. |
96 | To preserve the semantics described before we want this to be rewritten |
97 | in such a way that we finally return |
98 | |
99 | a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...). |
100 | |
101 | I.e. we increase a_acc by a * m_acc, multiply m_acc by m and |
102 | eliminate the tail call to f. Special cases when the value is just |
103 | added or just multiplied are obtained by setting a = 0 or m = 1. |
104 | |
105 | TODO -- it is possible to do similar tricks for other operations. */ |
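A worked example of case 3 may make the accumulators more concrete. With only a multiplicative factor (a = 0, m = n), the transformation described above turns a factorial-style function into a loop over m_acc; a_acc is omitted because it never changes. This is an illustrative C sketch, not the exact GIMPLE the pass produces:

int fact (int n)
{
  if (n <= 1)
    return 1;
  return n * fact (n - 1);	/* tail-recursive call with m = n, a = 0 */
}

/* ... becomes, conceptually ... */

int fact (int n)
{
  int m_acc = 1;		/* multiplicative accumulator, initially 1 */
  while (n > 1)
    m_acc *= n--;		/* m_acc is multiplied by m on each pass */
  return m_acc * 1;		/* a plain "return x" becomes m_acc * x */
}

For floating-point return types this reassociation is only performed under -fassociative-math, as checked in process_assignment below.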
106 | |
107 | /* A structure that describes the tailcall. */ |
108 | |
109 | struct tailcall |
110 | { |
111 | /* The iterator pointing to the call statement. */ |
112 | gimple_stmt_iterator call_gsi; |
113 | |
114 | /* True if it is a call to the current function. */ |
115 | bool tail_recursion; |
116 | |
117 | /* The return value of the caller is mult * f + add, where f is the return |
118 | value of the call. */ |
119 | tree mult, add; |
120 | |
121 | /* Next tailcall in the chain. */ |
122 | struct tailcall *next; |
123 | }; |
124 | |
125 | /* The variables holding the value of multiplicative and additive |
126 | accumulator. */ |
127 | static tree m_acc, a_acc; |
128 | |
129 | /* Bitmap with a bit for each function parameter which is set to true if we |
130 | have to copy the parameter for conversion of tail-recursive calls. */ |
131 | |
132 | static bitmap tailr_arg_needs_copy; |
133 | |
134 | /* Returns false when the function is not suitable for tail call optimization |
135 | for some reason (e.g. if it takes a variable number of arguments). */ |
136 | |
137 | static bool |
138 | suitable_for_tail_opt_p (void) |
139 | { |
140 | if (cfun->stdarg) |
141 | return false; |
142 | |
143 | return true; |
144 | } |
145 | |
146 | /* Returns false when the function is not suitable for tail call optimization |
147 | for some reason (e.g. if it takes a variable number of arguments). |
148 | This test must pass in addition to suitable_for_tail_opt_p in order to make |
149 | tail call discovery happen. */ |
150 | |
151 | static bool |
152 | suitable_for_tail_call_opt_p (void) |
153 | { |
154 | tree param; |
155 | |
156 | /* alloca (until we have stack slot life analysis) inhibits |
157 | sibling call optimizations, but not tail recursion. */ |
158 | if (cfun->calls_alloca) |
159 | return false; |
160 | |
161 | /* If we are using sjlj exceptions, we may need to add a call to |
162 | _Unwind_SjLj_Unregister at exit of the function. Which means |
163 | that we cannot do any sibcall transformations. */ |
164 | if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ |
165 | && current_function_has_exception_handlers ()) |
166 | return false; |
167 | |
168 | /* Any function that calls setjmp might have longjmp called from |
169 | any called function. ??? We really should represent this |
170 | properly in the CFG so that this needn't be special cased. */ |
171 | if (cfun->calls_setjmp) |
172 | return false; |
173 | |
174 | /* Various targets don't handle tail calls correctly in functions |
175 | that call __builtin_eh_return. */ |
176 | if (cfun->calls_eh_return) |
177 | return false; |
178 | |
179 | /* ??? It is OK if the argument of a function is taken in some cases, |
180 | but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */ |
181 | for (param = DECL_ARGUMENTS (current_function_decl); |
182 | param; |
183 | param = DECL_CHAIN (param)) |
184 | if (TREE_ADDRESSABLE (param)) |
185 | return false; |
186 | |
187 | return true; |
188 | } |
189 | |
190 | /* Checks whether the expression EXPR in stmt AT is independent of the |
191 | statement pointed to by GSI (in the sense that we already know EXPR's value |
192 | at GSI). We use the fact that we are only called from the chain of |
193 | basic blocks that have only single successor. Returns the expression |
194 | containing the value of EXPR at GSI. */ |
195 | |
196 | static tree |
197 | independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi, |
198 | bitmap to_move) |
199 | { |
200 | basic_block bb, call_bb, at_bb; |
201 | edge e; |
202 | edge_iterator ei; |
203 | |
204 | if (is_gimple_min_invariant (expr)) |
205 | return expr; |
206 | |
207 | if (TREE_CODE (expr) != SSA_NAME) |
208 | return NULL_TREE; |
209 | |
210 | if (bitmap_bit_p (to_move, SSA_NAME_VERSION (expr))) |
211 | return expr; |
212 | |
213 | /* Mark the blocks in the chain leading to the end. */ |
214 | at_bb = gimple_bb (at); |
215 | call_bb = gimple_bb (gsi_stmt (gsi)); |
216 | for (bb = call_bb; bb != at_bb; bb = single_succ (bb)) |
217 | bb->aux = &bb->aux; |
218 | bb->aux = &bb->aux; |
219 | |
220 | while (1) |
221 | { |
222 | at = SSA_NAME_DEF_STMT (expr); |
223 | bb = gimple_bb (at); |
224 | |
225 | /* The default definition or defined before the chain. */ |
226 | if (!bb || !bb->aux) |
227 | break; |
228 | |
229 | if (bb == call_bb) |
230 | { |
231 | for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
232 | if (gsi_stmt (gsi) == at) |
233 | break; |
234 | |
235 | if (!gsi_end_p (gsi)) |
236 | expr = NULL_TREE; |
237 | break; |
238 | } |
239 | |
240 | if (gimple_code (at) != GIMPLE_PHI) |
241 | { |
242 | expr = NULL_TREE; |
243 | break; |
244 | } |
245 | |
246 | FOR_EACH_EDGE (e, ei, bb->preds) |
247 | if (e->src->aux) |
248 | break; |
249 | gcc_assert (e); |
250 | |
251 | expr = PHI_ARG_DEF_FROM_EDGE (at, e); |
252 | if (TREE_CODE (expr) != SSA_NAME) |
253 | { |
254 | /* The value is a constant. */ |
255 | break; |
256 | } |
257 | } |
258 | |
259 | /* Unmark the blocks. */ |
260 | for (bb = call_bb; bb != at_bb; bb = single_succ (bb)) |
261 | bb->aux = NULL; |
262 | bb->aux = NULL; |
263 | |
264 | return expr; |
265 | } |
266 | |
267 | enum par { FAIL, OK, TRY_MOVE }; |
268 | |
269 | /* Simulates the effect of an assignment STMT on the return value of the tail |
270 | recursive CALL passed in ASS_VAR. M and A are the multiplicative and the |
271 | additive factor for the real return value. */ |
272 | |
273 | static par |
274 | process_assignment (gassign *stmt, |
275 | gimple_stmt_iterator call, tree *m, |
276 | tree *a, tree *ass_var, bitmap to_move) |
277 | { |
278 | tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE; |
279 | tree dest = gimple_assign_lhs (stmt); |
280 | enum tree_code code = gimple_assign_rhs_code (stmt); |
281 | enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code); |
282 | tree src_var = gimple_assign_rhs1 (stmt); |
283 | |
284 | /* See if this is a simple copy operation of an SSA name to the function |
285 | result. In that case we may have a simple tail call. Ignore type |
286 | conversions that can never produce extra code between the function |
287 | call and the function return. */ |
288 | if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt)) |
289 | && src_var == *ass_var) |
290 | { |
291 | /* Reject a tailcall if the type conversion might need |
292 | additional code. */ |
293 | if (gimple_assign_cast_p (stmt)) |
294 | { |
295 | if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var))) |
296 | return FAIL; |
297 | |
298 | /* Even if the type modes are the same, if the precision of the |
299 | type is smaller than mode's precision, |
300 | reduce_to_bit_field_precision would generate additional code. */ |
301 | if (INTEGRAL_TYPE_P (TREE_TYPE (dest)) |
302 | && !type_has_mode_precision_p (TREE_TYPE (dest))) |
303 | return FAIL; |
304 | } |
305 | |
306 | *ass_var = dest; |
307 | return OK; |
308 | } |
309 | |
310 | switch (rhs_class) |
311 | { |
312 | case GIMPLE_BINARY_RHS: |
313 | op1 = gimple_assign_rhs2 (stmt); |
314 | |
315 | /* Fall through. */ |
316 | |
317 | case GIMPLE_UNARY_RHS: |
318 | op0 = gimple_assign_rhs1 (stmt); |
319 | break; |
320 | |
321 | default: |
322 | return FAIL; |
323 | } |
324 | |
325 | /* Accumulator optimizations will reverse the order of operations. |
326 | We can only do that for floating-point types if we're assuming |
327 | that addition and multiplication are associative. */ |
328 | if (!flag_associative_math) |
329 | if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl)))) |
330 | return FAIL; |
331 | |
332 | if (rhs_class == GIMPLE_UNARY_RHS |
333 | && op0 == *ass_var) |
334 | ; |
335 | else if (op0 == *ass_var |
336 | && (non_ass_var = independent_of_stmt_p (op1, stmt, call, |
337 | to_move))) |
338 | ; |
339 | else if (*ass_var |
340 | && op1 == *ass_var |
341 | && (non_ass_var = independent_of_stmt_p (op0, stmt, call, |
342 | to_move))) |
343 | ; |
344 | else |
345 | return TRY_MOVE; |
346 | |
347 | switch (code) |
348 | { |
349 | case PLUS_EXPR: |
350 | *a = non_ass_var; |
351 | *ass_var = dest; |
352 | return OK; |
353 | |
354 | case POINTER_PLUS_EXPR: |
355 | if (op0 != *ass_var) |
356 | return FAIL; |
357 | *a = non_ass_var; |
358 | *ass_var = dest; |
359 | return OK; |
360 | |
361 | case MULT_EXPR: |
362 | *m = non_ass_var; |
363 | *ass_var = dest; |
364 | return OK; |
365 | |
366 | case NEGATE_EXPR: |
367 | *m = build_minus_one_cst (TREE_TYPE (op0)); |
368 | *ass_var = dest; |
369 | return OK; |
370 | |
371 | case MINUS_EXPR: |
372 | if (*ass_var == op0) |
373 | *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var); |
374 | else |
375 | { |
376 | *m = build_minus_one_cst (TREE_TYPE (non_ass_var)); |
377 | *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var); |
378 | } |
379 | |
380 | *ass_var = dest; |
381 | return OK; |
382 | |
383 | default: |
384 | return FAIL; |
385 | } |
386 | } |
387 | |
388 | /* Propagate VAR through phis on edge E. */ |
389 | |
390 | static tree |
391 | propagate_through_phis (tree var, edge e) |
392 | { |
393 | basic_block dest = e->dest; |
394 | gphi_iterator gsi; |
395 | |
396 | for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
397 | { |
398 | gphi *phi = gsi.phi (); |
399 | if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var) |
400 | return PHI_RESULT (phi); |
401 | } |
402 | return var; |
403 | } |
404 | |
405 | /* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars |
406 | returns. Computed lazily, but just once for the function. */ |
407 | static live_vars_map *live_vars; |
408 | static vec<bitmap_head> live_vars_vec; |
409 | |
410 | /* Finds tailcalls falling into basic block BB. The list of found tailcalls is |
411 | added to the start of RET. */ |
412 | |
413 | static void |
414 | find_tail_calls (basic_block bb, struct tailcall **ret) |
415 | { |
416 | tree ass_var = NULL_TREE, ret_var, func, param; |
417 | gimple *stmt; |
418 | gcall *call = NULL; |
419 | gimple_stmt_iterator gsi, agsi; |
420 | bool tail_recursion; |
421 | struct tailcall *nw; |
422 | edge e; |
423 | tree m, a; |
424 | basic_block abb; |
425 | size_t idx; |
426 | tree var; |
427 | |
428 | if (!single_succ_p (bb)) |
429 | return; |
430 | |
431 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi)) |
432 | { |
433 | stmt = gsi_stmt (gsi); |
434 | |
435 | /* Ignore labels, returns, nops, clobbers and debug stmts. */ |
436 | if (gimple_code (stmt) == GIMPLE_LABEL |
437 | || gimple_code (stmt) == GIMPLE_RETURN |
438 | || gimple_code (stmt) == GIMPLE_NOP |
439 | || gimple_code (stmt) == GIMPLE_PREDICT |
440 | || gimple_clobber_p (stmt) |
441 | || is_gimple_debug (stmt)) |
442 | continue; |
443 | |
444 | /* Check for a call. */ |
445 | if (is_gimple_call (stmt)) |
446 | { |
447 | call = as_a <gcall *> (stmt); |
448 | ass_var = gimple_call_lhs (call); |
449 | break; |
450 | } |
451 | |
452 | /* Allow simple copies between local variables, even if they're |
453 | aggregates. */ |
454 | if (is_gimple_assign (stmt) |
455 | && auto_var_in_fn_p (gimple_assign_lhs (stmt), cfun->decl) |
456 | && auto_var_in_fn_p (gimple_assign_rhs1 (stmt), cfun->decl)) |
457 | continue; |
458 | |
459 | /* If the statement references memory or volatile operands, fail. */ |
460 | if (gimple_references_memory_p (stmt) |
461 | || gimple_has_volatile_ops (stmt)) |
462 | return; |
463 | } |
464 | |
465 | if (gsi_end_p (gsi)) |
466 | { |
467 | edge_iterator ei; |
468 | /* Recurse to the predecessors. */ |
469 | FOR_EACH_EDGE (e, ei, bb->preds) |
470 | find_tail_calls (e->src, ret); |
471 | |
472 | return; |
473 | } |
474 | |
475 | /* If the LHS of our call is not just a simple register or local |
476 | variable, we can't transform this into a tail or sibling call. |
477 | This situation happens, in (e.g.) "*p = foo()" where foo returns a |
478 | struct. In this case we won't have a temporary here, but we need |
479 | to carry out the side effect anyway, so tailcall is impossible. |
480 | |
481 | ??? In some situations (when the struct is returned in memory via |
482 | invisible argument) we could deal with this, e.g. by passing 'p' |
483 | itself as that argument to foo, but it's too early to do this here, |
484 | and expand_call() will not handle it anyway. If it ever can, then |
485 | we need to revisit this here, to allow that situation. */ |
486 | if (ass_var |
487 | && !is_gimple_reg (ass_var) |
488 | && !auto_var_in_fn_p (ass_var, cfun->decl)) |
489 | return; |
490 | |
491 | /* If the call might throw an exception that wouldn't propagate out of |
492 | cfun, we can't transform to a tail or sibling call (82081). */ |
493 | if (stmt_could_throw_p (cfun, stmt) |
494 | && !stmt_can_throw_external (cfun, stmt)) |
495 | return; |
496 | |
497 | /* If the function returns a value, then at present, the tail call |
498 | must return the same type of value. There is conceptually a copy |
499 | between the object returned by the tail call candidate and the |
500 | object returned by CFUN itself. |
501 | |
502 | This means that if we have: |
503 | |
504 | lhs = f (&<retval>); // f reads from <retval> |
505 | // (lhs is usually also <retval>) |
506 | |
507 | there is a copy between the temporary object returned by f and lhs, |
508 | meaning that any use of <retval> in f occurs before the assignment |
509 | to lhs begins. Thus the <retval> that is live on entry to the call |
510 | to f is really an independent local variable V that happens to be |
511 | stored in the RESULT_DECL rather than a local VAR_DECL. |
512 | |
513 | Turning this into a tail call would remove the copy and make the |
514 | lifetimes of the return value and V overlap. The same applies to |
515 | tail recursion, since if f can read from <retval>, we have to assume |
516 | that CFUN might already have written to <retval> before the call. |
517 | |
518 | The problem doesn't apply when <retval> is passed by value, but that |
519 | isn't a case we handle anyway. */ |
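As a concrete, hypothetical illustration of the situation described above (the exact point at which the local is merged into <retval> depends on NRV/return-slot handling), consider a caller whose return object's address escapes into the callee:

struct big { int a[32]; };
struct big f (struct big *p);	/* may read *p before writing its result */

struct big g (void)
{
  struct big r;			/* assume r is merged into <retval> */
  r = f (&r);			/* lhs = f (&<retval>); f may read <retval> */
  return r;
}

Turning the call in g into a tail call would remove the conceptual copy between f's returned object and <retval>, making their lifetimes overlap; that is why the check below refuses when result_decl may be aliased and the call might read it.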
520 | tree result_decl = DECL_RESULT (cfun->decl); |
521 | if (result_decl |
522 | && may_be_aliased (result_decl) |
523 | && ref_maybe_used_by_stmt_p (call, result_decl, false)) |
524 | return; |
525 | |
526 | /* We found the call, check whether it is suitable. */ |
527 | tail_recursion = false; |
528 | func = gimple_call_fndecl (call); |
529 | if (func |
530 | && !fndecl_built_in_p (func) |
531 | && recursive_call_p (current_function_decl, func)) |
532 | { |
533 | tree arg; |
534 | |
535 | for (param = DECL_ARGUMENTS (current_function_decl), idx = 0; |
536 | param && idx < gimple_call_num_args (call); |
537 | param = DECL_CHAIN (param), idx++) |
538 | { |
539 | arg = gimple_call_arg (call, idx); |
540 | if (param != arg) |
541 | { |
542 | /* Make sure there are no problems with copying. The parameter must |
543 | have a copyable type and the two arguments must have reasonably |
544 | equivalent types. The latter requirement could be relaxed if |
545 | we emitted a suitable type conversion statement. */ |
546 | if (!is_gimple_reg_type (TREE_TYPE (param)) |
547 | || !useless_type_conversion_p (TREE_TYPE (param), |
548 | TREE_TYPE (arg))) |
549 | break; |
550 | |
551 | /* The parameter should be a real operand, so that phi node |
552 | created for it at the start of the function has the meaning |
553 | of copying the value. This test implies is_gimple_reg_type |
554 | from the previous condition, however this one could be |
555 | relaxed by being more careful with copying the new value |
556 | of the parameter (emitting appropriate GIMPLE_ASSIGN and |
557 | updating the virtual operands). */ |
558 | if (!is_gimple_reg (param)) |
559 | break; |
560 | } |
561 | } |
562 | if (idx == gimple_call_num_args (call) && !param) |
563 | tail_recursion = true; |
564 | } |
565 | |
566 | /* Compute live vars if not computed yet. */ |
567 | if (live_vars == NULL) |
568 | { |
569 | unsigned int cnt = 0; |
570 | FOR_EACH_LOCAL_DECL (cfun, idx, var) |
571 | if (VAR_P (var) |
572 | && auto_var_in_fn_p (var, cfun->decl) |
573 | && may_be_aliased (var)) |
574 | { |
575 | if (live_vars == NULL) |
576 | live_vars = new live_vars_map; |
577 | live_vars->put (DECL_UID (var), cnt++); |
578 | } |
579 | if (live_vars) |
580 | live_vars_vec = compute_live_vars (cfun, live_vars); |
581 | } |
582 | |
583 | /* Determine a bitmap of variables which are still in scope after the |
584 | call. */ |
585 | bitmap local_live_vars = NULL; |
586 | if (live_vars) |
587 | local_live_vars = live_vars_at_stmt (live_vars_vec, live_vars, call); |
588 | |
589 | /* Make sure the tail invocation of this function does not indirectly |
590 | refer to local variables. (Passing variables directly by value |
591 | is OK.) */ |
592 | FOR_EACH_LOCAL_DECL (cfun, idx, var) |
593 | { |
594 | if (TREE_CODE (var) != PARM_DECL |
595 | && auto_var_in_fn_p (var, cfun->decl) |
596 | && may_be_aliased (var) |
597 | && (ref_maybe_used_by_stmt_p (call, var, false) |
598 | || call_may_clobber_ref_p (call, var, false))) |
599 | { |
600 | if (!VAR_P (var)) |
601 | { |
602 | if (local_live_vars) |
603 | BITMAP_FREE (local_live_vars); |
604 | return; |
605 | } |
606 | else |
607 | { |
608 | unsigned int *v = live_vars->get (DECL_UID (var)); |
609 | if (bitmap_bit_p (local_live_vars, *v)) |
610 | { |
611 | BITMAP_FREE (local_live_vars); |
612 | return; |
613 | } |
614 | } |
615 | } |
616 | } |
617 | |
618 | if (local_live_vars) |
619 | BITMAP_FREE (local_live_vars); |
620 | |
621 | /* Now check the statements after the call. None of them has virtual |
622 | operands, so they may only depend on the call through its return |
623 | value. The return value should also be dependent on each of them, |
624 | since we are running after dce. */ |
625 | m = NULL_TREE; |
626 | a = NULL_TREE; |
627 | auto_bitmap to_move_defs; |
628 | auto_vec<gimple *> to_move_stmts; |
629 | |
630 | abb = bb; |
631 | agsi = gsi; |
632 | while (1) |
633 | { |
634 | tree tmp_a = NULL_TREE; |
635 | tree tmp_m = NULL_TREE; |
636 | gsi_next (&agsi); |
637 | |
638 | while (gsi_end_p (agsi)) |
639 | { |
640 | ass_var = propagate_through_phis (ass_var, single_succ_edge (abb)); |
641 | abb = single_succ (abb); |
642 | agsi = gsi_start_bb (abb); |
643 | } |
644 | |
645 | stmt = gsi_stmt (agsi); |
646 | if (gimple_code (stmt) == GIMPLE_RETURN) |
647 | break; |
648 | |
649 | if (gimple_code (stmt) == GIMPLE_LABEL |
650 | || gimple_code (stmt) == GIMPLE_NOP |
651 | || gimple_code (stmt) == GIMPLE_PREDICT |
652 | || gimple_clobber_p (stmt) |
653 | || is_gimple_debug (stmt)) |
654 | continue; |
655 | |
656 | if (gimple_code (stmt) != GIMPLE_ASSIGN) |
657 | return; |
658 | |
659 | /* This is a gimple assign. */ |
660 | par ret = process_assignment (as_a <gassign *> (stmt), gsi, |
661 | &tmp_m, &tmp_a, &ass_var, to_move_defs); |
662 | if (ret == FAIL) |
663 | return; |
664 | else if (ret == TRY_MOVE) |
665 | { |
666 | if (! tail_recursion) |
667 | return; |
668 | /* Do not deal with checking dominance, the real fix is to |
669 | do path isolation for the transform phase anyway, removing |
670 | the need to compute the accumulators with new stmts. */ |
671 | if (abb != bb) |
672 | return; |
673 | for (unsigned opno = 1; opno < gimple_num_ops (stmt); ++opno) |
674 | { |
675 | tree op = gimple_op (stmt, opno); |
676 | if (independent_of_stmt_p (op, stmt, gsi, to_move_defs) != op) |
677 | return; |
678 | } |
679 | bitmap_set_bit (to_move_defs, |
680 | SSA_NAME_VERSION (gimple_assign_lhs (stmt))); |
681 | to_move_stmts.safe_push (stmt); |
682 | continue; |
683 | } |
684 | |
685 | if (tmp_a) |
686 | { |
687 | tree type = TREE_TYPE (tmp_a); |
688 | if (a) |
689 | a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a); |
690 | else |
691 | a = tmp_a; |
692 | } |
693 | if (tmp_m) |
694 | { |
695 | tree type = TREE_TYPE (tmp_m); |
696 | if (m) |
697 | m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m); |
698 | else |
699 | m = tmp_m; |
700 | |
701 | if (a) |
702 | a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m); |
703 | } |
704 | } |
705 | |
706 | /* See if this is a tail call we can handle. */ |
707 | ret_var = gimple_return_retval (as_a <greturn *> (stmt)); |
708 | |
709 | /* We may proceed if there is no return value, or the return value is |
710 | identical to the call's return value, or the return decl is an empty |
711 | type variable and the call's return value was not assigned. */ |
712 | if (ret_var |
713 | && (ret_var != ass_var |
714 | && !(is_empty_type (TREE_TYPE (ret_var)) && !ass_var))) |
715 | return; |
716 | |
717 | /* If this is not a tail recursive call, we cannot handle addends or |
718 | multiplicands. */ |
719 | if (!tail_recursion && (m || a)) |
720 | return; |
721 | |
722 | /* For pointers only allow additions. */ |
723 | if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl)))) |
724 | return; |
725 | |
726 | /* Move queued defs. */ |
727 | if (tail_recursion) |
728 | { |
729 | unsigned i; |
730 | FOR_EACH_VEC_ELT (to_move_stmts, i, stmt) |
731 | { |
732 | gimple_stmt_iterator mgsi = gsi_for_stmt (stmt); |
733 | gsi_move_before (&mgsi, &gsi); |
734 | } |
735 | if (!tailr_arg_needs_copy) |
736 | tailr_arg_needs_copy = BITMAP_ALLOC (NULL); |
737 | for (param = DECL_ARGUMENTS (current_function_decl), idx = 0; |
738 | param; |
739 | param = DECL_CHAIN (param), idx++) |
740 | { |
741 | tree ddef, arg = gimple_call_arg (call, idx); |
742 | if (is_gimple_reg (param) |
743 | && (ddef = ssa_default_def (cfun, param)) |
744 | && (arg != ddef)) |
745 | bitmap_set_bit (tailr_arg_needs_copy, idx); |
746 | } |
747 | } |
748 | |
749 | nw = XNEW (struct tailcall); |
750 | |
751 | nw->call_gsi = gsi; |
752 | |
753 | nw->tail_recursion = tail_recursion; |
754 | |
755 | nw->mult = m; |
756 | nw->add = a; |
757 | |
758 | nw->next = *ret; |
759 | *ret = nw; |
760 | } |
761 | |
762 | /* Helper to insert PHI_ARG into the PHI node for VAR in the destination of edge E. */ |
763 | |
764 | static void |
765 | add_successor_phi_arg (edge e, tree var, tree phi_arg) |
766 | { |
767 | gphi_iterator gsi; |
768 | |
769 | for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
770 | if (PHI_RESULT (gsi.phi ()) == var) |
771 | break; |
772 | |
773 | gcc_assert (!gsi_end_p (gsi)); |
774 | add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION); |
775 | } |
776 | |
777 | /* Creates a GIMPLE statement which computes the operation specified by |
778 | CODE, ACC and OP1 into a new variable named LABEL and inserts the |
779 | statement in the position specified by GSI. Returns the |
780 | tree node of the statement's result. */ |
781 | |
782 | static tree |
783 | adjust_return_value_with_ops (enum tree_code code, const char *label, |
784 | tree acc, tree op1, gimple_stmt_iterator gsi) |
785 | { |
786 | |
787 | tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
788 | tree result = make_temp_ssa_name (ret_type, NULL, label); |
789 | gassign *stmt; |
790 | |
791 | if (POINTER_TYPE_P (ret_type)) |
792 | { |
793 | gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype); |
794 | code = POINTER_PLUS_EXPR; |
795 | } |
796 | if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)) |
797 | && code != POINTER_PLUS_EXPR) |
798 | stmt = gimple_build_assign (result, code, acc, op1); |
799 | else |
800 | { |
801 | tree tem; |
802 | if (code == POINTER_PLUS_EXPR) |
803 | tem = fold_build2 (code, TREE_TYPE (op1), op1, acc); |
804 | else |
805 | tem = fold_build2 (code, TREE_TYPE (op1), |
806 | fold_convert (TREE_TYPE (op1), acc), op1); |
807 | tree rhs = fold_convert (ret_type, tem); |
808 | rhs = force_gimple_operand_gsi (&gsi, rhs, |
809 | false, NULL, true, GSI_SAME_STMT); |
810 | stmt = gimple_build_assign (result, rhs); |
811 | } |
812 | |
813 | gsi_insert_before (&gsi, stmt, GSI_NEW_STMT); |
814 | return result; |
815 | } |
816 | |
817 | /* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by |
818 | the computation specified by CODE and OP1 and inserts the statement |
819 | at the position specified by GSI as a new statement. Returns the new |
820 | SSA name of the updated accumulator. */ |
821 | |
822 | static tree |
823 | update_accumulator_with_ops (enum tree_code code, tree acc, tree op1, |
824 | gimple_stmt_iterator gsi) |
825 | { |
826 | gassign *stmt; |
827 | tree var = copy_ssa_name (acc); |
828 | if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))) |
829 | stmt = gimple_build_assign (var, code, acc, op1); |
830 | else |
831 | { |
832 | tree rhs = fold_convert (TREE_TYPE (acc), |
833 | fold_build2 (code, |
834 | TREE_TYPE (op1), |
835 | fold_convert (TREE_TYPE (op1), acc), |
836 | op1)); |
837 | rhs = force_gimple_operand_gsi (&gsi, rhs, |
838 | false, NULL, false, GSI_CONTINUE_LINKING); |
839 | stmt = gimple_build_assign (var, rhs); |
840 | } |
841 | gsi_insert_after (&gsi, stmt, GSI_NEW_STMT); |
842 | return var; |
843 | } |
844 | |
845 | /* Adjust the accumulator values according to A and M after GSI, and update |
846 | the phi nodes on edge BACK. */ |
847 | |
848 | static void |
849 | adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back) |
850 | { |
851 | tree var, a_acc_arg, m_acc_arg; |
852 | |
853 | if (m) |
854 | m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT); |
855 | if (a) |
856 | a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT); |
857 | |
858 | a_acc_arg = a_acc; |
859 | m_acc_arg = m_acc; |
860 | if (a) |
861 | { |
862 | if (m_acc) |
863 | { |
864 | if (integer_onep (a)) |
865 | var = m_acc; |
866 | else |
867 | var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc, |
868 | a, gsi); |
869 | } |
870 | else |
871 | var = a; |
872 | |
873 | a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi); |
874 | } |
875 | |
876 | if (m) |
877 | m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi); |
878 | |
879 | if (a_acc) |
880 | add_successor_phi_arg (back, a_acc, a_acc_arg); |
881 | |
882 | if (m_acc) |
883 | add_successor_phi_arg (back, m_acc, m_acc_arg); |
884 | } |
885 | |
886 | /* Adjust the return value at the end of BB according to the M and A |
887 | accumulators. */ |
888 | |
889 | static void |
890 | adjust_return_value (basic_block bb, tree m, tree a) |
891 | { |
892 | tree retval; |
893 | greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb))); |
894 | gimple_stmt_iterator gsi = gsi_last_bb (bb); |
895 | |
896 | gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN); |
897 | |
898 | retval = gimple_return_retval (ret_stmt); |
899 | if (!retval || retval == error_mark_node) |
900 | return; |
901 | |
902 | if (m) |
903 | retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval, |
904 | gsi); |
905 | if (a) |
906 | retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval, |
907 | gsi); |
908 | gimple_return_set_retval (ret_stmt, retval); |
909 | update_stmt (ret_stmt); |
910 | } |
911 | |
912 | /* Subtract COUNT from the basic block and its |
913 | outgoing edge. */ |
914 | static void |
915 | decrease_profile (basic_block bb, profile_count count) |
916 | { |
917 | bb->count = bb->count - count; |
918 | if (!single_succ_p (bb)) |
919 | { |
920 | gcc_assert (!EDGE_COUNT (bb->succs)); |
921 | return; |
922 | } |
923 | } |
924 | |
925 | /* Eliminates the tail call described by T by turning it into a jump to |
926 | the start of the function and adding PHI arguments for the parameters. |
927 | Allocates *NEW_LOOP if not already done and initializes it. */ |
928 | |
929 | static void |
930 | eliminate_tail_call (struct tailcall *t, class loop *&new_loop) |
931 | { |
932 | tree param, rslt; |
933 | gimple *stmt, *call; |
934 | tree arg; |
935 | size_t idx; |
936 | basic_block bb, first; |
937 | edge e; |
938 | gphi *phi; |
939 | gphi_iterator gpi; |
940 | gimple_stmt_iterator gsi; |
941 | gimple *orig_stmt; |
942 | |
943 | stmt = orig_stmt = gsi_stmt (t->call_gsi); |
Although the value stored to 'orig_stmt' is used in the enclosing expression, the value is never actually read from 'orig_stmt' | |
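If the analyzer is right that nothing reads 'orig_stmt' anywhere in this function, a minimal cleanup (a sketch, not necessarily the fix the maintainers would choose) is to drop the dead store here and the now-unused local declared at line 941, keeping only the value that is actually used below:

stmt = gsi_stmt (t->call_gsi);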
944 | bb = gsi_bb (t->call_gsi); |
945 | |
946 | if (dump_file && (dump_flags & TDF_DETAILS)) |
947 | { |
948 | fprintf (dump_file, "Eliminated tail recursion in bb %d : ", |
949 | bb->index); |
950 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
951 | fprintf (dump_file, "\n"); |
952 | } |
953 | |
954 | gcc_assert (is_gimple_call (stmt))((void)(!(is_gimple_call (stmt)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-tailcall.cc" , 954, __FUNCTION__), 0 : 0)); |
955 | |
957 | first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
957 | |
958 | /* Remove the code after call_gsi that will become unreachable. The |
959 | possibly unreachable code in other blocks is removed later in |
960 | cfg cleanup. */ |
961 | gsi = t->call_gsi; |
962 | gimple_stmt_iterator gsi2 = gsi_last_bb (gimple_bb (gsi_stmt (gsi))); |
963 | while (gsi_stmt (gsi2) != gsi_stmt (gsi)) |
964 | { |
965 | gimple *t = gsi_stmt (gsi2); |
966 | /* Do not remove the return statement, so that redirect_edge_and_branch |
967 | sees how the block ends. */ |
968 | if (gimple_code (t) != GIMPLE_RETURN) |
969 | { |
970 | gimple_stmt_iterator gsi3 = gsi2; |
971 | gsi_prev (&gsi2); |
972 | gsi_remove (&gsi3, true); |
973 | release_defs (t); |
974 | } |
975 | else |
976 | gsi_prev (&gsi2); |
977 | } |
978 | |
979 | /* The number of executions of the function has been reduced by the tailcall. */ |
980 | e = single_succ_edge (gsi_bb (t->call_gsi)); |
981 | |
982 | profile_count count = e->count (); |
983 | |
984 | /* When the profile is inconsistent and the recursion edge is more frequent |
985 | than the number of executions of the function, scale it down, so we do not |
986 | end up with 0 executions of the entry block. */ |
988 | if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count) |
989 | count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8); |
990 | decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count); |
991 | decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count); |
992 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) |
992 | decrease_profile (e->dest, count); |
993 | |
994 | /* Replace the call by a jump to the start of function. */ |
995 | e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)), |
996 | first); |
997 | gcc_assert (e); |
998 | PENDING_STMT (e) = NULL; |
999 | |
1000 | /* Add the new loop. */ |
1001 | if (!new_loop) |
1002 | { |
1003 | new_loop = alloc_loop (); |
1004 | new_loop->header = first; |
1005 | new_loop->finite_p = true; |
1006 | } |
1007 | else |
1008 | gcc_assert (new_loop->header == first); |
1009 | |
1010 | /* Add phi node entries for arguments. The ordering of the phi nodes should |
1011 | be the same as the ordering of the arguments. */ |
1012 | for (param = DECL_ARGUMENTS (current_function_decl), |
1013 | idx = 0, gpi = gsi_start_phis (first); |
1014 | param; |
1015 | param = DECL_CHAIN (param), idx++) |
1016 | { |
1017 | if (!bitmap_bit_p (tailr_arg_needs_copy, idx)) |
1018 | continue; |
1019 | |
1020 | arg = gimple_call_arg (stmt, idx); |
1021 | phi = gpi.phi (); |
1022 | gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi))); |
1023 | |
1024 | add_phi_arg (phi, arg, e, gimple_location (stmt)); |
1025 | gsi_next (&gpi); |
1026 | } |
1027 | |
1028 | /* Update the values of accumulators. */ |
1029 | adjust_accumulator_values (t->call_gsi, t->mult, t->add, e); |
1030 | |
1031 | call = gsi_stmt (t->call_gsi); |
1032 | rslt = gimple_call_lhs (call); |
1033 | if (rslt != NULL_TREE && TREE_CODE (rslt) == SSA_NAME) |
1034 | { |
1035 | /* Result of the call will no longer be defined. So adjust the |
1036 | SSA_NAME_DEF_STMT accordingly. */ |
1037 | SSA_NAME_DEF_STMT (rslt) = gimple_build_nop (); |
1038 | } |
1039 | |
1040 | gsi_remove (&t->call_gsi, true); |
1041 | release_defs (call); |
1042 | } |
1043 | |
1044 | /* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also |
1045 | mark the tailcalls for the sibcall optimization. */ |
1046 | |
1047 | static bool |
1048 | optimize_tail_call (struct tailcall *t, bool opt_tailcalls, |
1049 | class loop *&new_loop) |
1050 | { |
1051 | if (t->tail_recursion) |
1052 | { |
1053 | eliminate_tail_call (t, new_loop); |
1054 | return true; |
1055 | } |
1056 | |
1057 | if (opt_tailcalls) |
1058 | { |
1059 | gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi)); |
1060 | |
1061 | gimple_call_set_tail (stmt, true); |
1062 | cfun->tail_call_marked = true; |
1063 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1064 | { |
1065 | fprintf (dump_file, "Found tail call "); |
1066 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
1067 | fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index); |
1068 | } |
1069 | } |
1070 | |
1071 | return false; |
1072 | } |
1073 | |
1074 | /* Creates a tail-call accumulator of the same type as the return type of the |
1075 | current function. LABEL is the name used to create the temporary |
1076 | variable for the accumulator. The accumulator will be inserted in the |
1077 | phis of a basic block BB with single predecessor with an initial value |
1078 | INIT converted to the current function return type. */ |
1079 | |
1080 | static tree |
1081 | create_tailcall_accumulator (const char *label, basic_block bb, tree init) |
1082 | { |
1083 | tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
1084 | if (POINTER_TYPE_P (ret_type)) |
1085 | ret_type = sizetype; |
1086 | |
1087 | tree tmp = make_temp_ssa_name (ret_type, NULL, label); |
1088 | gphi *phi; |
1089 | |
1090 | phi = create_phi_node (tmp, bb); |
1091 | add_phi_arg (phi, init, single_pred_edge (bb), |
1092 | UNKNOWN_LOCATION); |
1093 | return PHI_RESULT (phi); |
1094 | } |
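     | /* Editorial sketch, not part of the original source: assuming an integral |
     |    return type and the label "add_acc", the PHI built above starts out |
     |    with only the entry-edge argument, roughly |
     |        add_acc_1 = PHI <INIT (entry)> |
     |    and the argument for each tail-recursive back edge is filled in later, |
     |    when adjust_accumulator_values processes the eliminated call.  */ |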
1095 | |
1096 | /* Optimizes tail calls in the function, turning the tail recursion |
1097 | into iteration. */ |
1098 | |
1099 | static unsigned int |
1100 | tree_optimize_tail_calls_1 (bool opt_tailcalls) |
1101 | { |
1102 | edge e; |
1103 | bool phis_constructed = false; |
1104 | struct tailcall *tailcalls = NULL, *act, *next; |
1105 | bool changed = false; |
1106 | basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
1107 | tree param; |
1108 | gimple *stmt; |
1109 | edge_iterator ei; |
1110 | |
1111 | if (!suitable_for_tail_opt_p ()) |
1112 | return 0; |
1113 | if (opt_tailcalls) |
1114 | opt_tailcalls = suitable_for_tail_call_opt_p (); |
1115 | |
1116 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
1117 | { |
1118 | /* Only traverse the normal exits, i.e. those that end with a return |
1119 | statement. */ |
1120 | stmt = last_stmt (e->src); |
1121 | |
1122 | if (stmt |
1123 | && gimple_code (stmt) == GIMPLE_RETURN) |
1124 | find_tail_calls (e->src, &tailcalls); |
1125 | } |
1126 | |
1127 | if (live_vars) |
1128 | { |
1129 | destroy_live_vars (live_vars_vec); |
1130 | delete live_vars; |
1131 | live_vars = NULL; |
1132 | } |
1133 | |
1134 | /* Construct the phi nodes and accumulators if necessary. */ |
1135 | a_acc = m_acc = NULL_TREE; |
1136 | for (act = tailcalls; act; act = act->next) |
1137 | { |
1138 | if (!act->tail_recursion) |
1139 | continue; |
1140 | |
1141 | if (!phis_constructed) |
1142 | { |
1143 | /* Ensure that the block has a single predecessor and no pre-existing |
1144 |    (degenerate) PHI nodes; if not, split the edge from the entry block.  */ |
1145 | if (!single_pred_p (first) |
1146 | || !gimple_seq_empty_p (phi_nodes (first))) |
1147 | first = |
1148 | split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))); |
1149 | |
1150 | /* Copy the args if needed. */ |
1151 | unsigned idx; |
1152 | for (param = DECL_ARGUMENTS (current_function_decl), idx = 0; |
1153 | param; |
1154 | param = DECL_CHAIN (param), idx++) |
1155 | if (bitmap_bit_p (tailr_arg_needs_copy, idx)) |
1156 | { |
1157 | tree name = ssa_default_def (cfun, param); |
1158 | tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name)); |
1159 | gphi *phi; |
1160 | |
1161 | set_ssa_default_def (cfun, param, new_name); |
1162 | phi = create_phi_node (name, first); |
1163 | add_phi_arg (phi, new_name, single_pred_edge (first), |
1164 | EXPR_LOCATION (param)); |
1165 | } |
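     | /* Illustrative example, added for clarity (not in the original source): |
     |    for a parameter N whose default definition was N_1(D), the block above |
     |    makes a fresh name N_2(D) the new default definition and creates |
     |        N_1 = PHI <N_2(D) (entry)> |
     |    in FIRST; eliminate_tail_call then adds an argument to this PHI for |
     |    each tail-recursive edge it redirects to FIRST.  */ |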
1166 | phis_constructed = true; |
1167 | } |
1168 | tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
1169 | if (POINTER_TYPE_P (ret_type)) |
1170 | ret_type = sizetype; |
1171 | |
1172 | if (act->add && !a_acc) |
1173 | a_acc = create_tailcall_accumulator ("add_acc", first, |
1174 | build_zero_cst (ret_type)); |
1175 | |
1176 | if (act->mult && !m_acc) |
1177 | m_acc = create_tailcall_accumulator ("mult_acc", first, |
1178 | build_one_cst (ret_type)); |
1179 | } |
1180 | |
1181 | if (a_acc || m_acc) |
1182 | { |
1183 | /* When the tail call elimination using accumulators is performed, |
1184 | statements adding the accumulated value are inserted at all exits. |
1185 | This turns all other tail calls into non-tail ones.  */ |
1186 | opt_tailcalls = false; |
1187 | } |
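     | /* Editorial note: concretely, a remaining exit of the form |
     |        ret_1 = g (x); |
     |        return ret_1; |
     |    gets the accumulator-combining statements inserted between the call and |
     |    the return by adjust_return_value below, so the call to g is no longer |
     |    in tail position and cannot be expanded as a sibcall.  */ |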
1188 | |
1189 | class loop *new_loop = NULL; |
1190 | for (; tailcalls; tailcalls = next) |
1191 | { |
1192 | next = tailcalls->next; |
1193 | changed |= optimize_tail_call (tailcalls, opt_tailcalls, new_loop); |
1194 | free (tailcalls); |
1195 | } |
1196 | if (new_loop) |
1197 | add_loop (new_loop, loops_for_fn (cfun)->tree_root); |
1198 | |
1199 | if (a_acc || m_acc) |
1200 | { |
1201 | /* Modify the remaining return statements. */ |
1202 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
1203 | { |
1204 | stmt = last_stmt (e->src); |
1205 | |
1206 | if (stmt |
1207 | && gimple_code (stmt) == GIMPLE_RETURN) |
1208 | adjust_return_value (e->src, m_acc, a_acc); |
1209 | } |
1210 | } |
1211 | |
1212 | if (changed) |
1213 | free_dominance_info (CDI_DOMINATORS); |
1214 | |
1215 | /* Add phi nodes for the virtual operands defined in the function to the |
1216 | header of the loop created by tail recursion elimination. Do so |
1217 | by triggering the SSA renamer. */ |
1218 | if (phis_constructed) |
1219 | mark_virtual_operands_for_renaming (cfun); |
1220 | |
1221 | if (tailr_arg_needs_copy) |
1222 | BITMAP_FREE (tailr_arg_needs_copy); |
1223 | |
1224 | if (changed) |
1225 | return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals; |
1226 | return 0; |
1227 | } |
1228 | |
1229 | static bool |
1230 | gate_tail_calls (void) |
1231 | { |
1232 | return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call); |
1233 | } |
1234 | |
1235 | static unsigned int |
1236 | execute_tail_calls (void) |
1237 | { |
1238 | return tree_optimize_tail_calls_1 (true); |
1239 | } |
1240 | |
1241 | namespace { |
1242 | |
1243 | const pass_data pass_data_tail_recursion = |
1244 | { |
1245 | GIMPLE_PASS, /* type */ |
1246 | "tailr", /* name */ |
1247 | OPTGROUP_NONE, /* optinfo_flags */ |
1248 | TV_NONE, /* tv_id */ |
1249 | ( PROP_cfg | PROP_ssa ), /* properties_required */ |
1250 | 0, /* properties_provided */ |
1251 | 0, /* properties_destroyed */ |
1252 | 0, /* todo_flags_start */ |
1253 | 0, /* todo_flags_finish */ |
1254 | }; |
1255 | |
1256 | class pass_tail_recursion : public gimple_opt_pass |
1257 | { |
1258 | public: |
1259 | pass_tail_recursion (gcc::context *ctxt) |
1260 | : gimple_opt_pass (pass_data_tail_recursion, ctxt) |
1261 | {} |
1262 | |
1263 | /* opt_pass methods: */ |
1264 | opt_pass * clone () final override |
1265 | { |
1266 | return new pass_tail_recursion (m_ctxt); |
1267 | } |
1268 | bool gate (function *) final override { return gate_tail_calls (); } |
1269 | unsigned int execute (function *) final override |
1270 | { |
1271 | return tree_optimize_tail_calls_1 (false); |
1272 | } |
1273 | |
1274 | }; // class pass_tail_recursion |
1275 | |
1276 | } // anon namespace |
1277 | |
1278 | gimple_opt_pass * |
1279 | make_pass_tail_recursion (gcc::context *ctxt) |
1280 | { |
1281 | return new pass_tail_recursion (ctxt); |
1282 | } |
1283 | |
1284 | namespace { |
1285 | |
1286 | const pass_data pass_data_tail_calls = |
1287 | { |
1288 | GIMPLE_PASS, /* type */ |
1289 | "tailc", /* name */ |
1290 | OPTGROUP_NONE, /* optinfo_flags */ |
1291 | TV_NONE, /* tv_id */ |
1292 | ( PROP_cfg | PROP_ssa ), /* properties_required */ |
1293 | 0, /* properties_provided */ |
1294 | 0, /* properties_destroyed */ |
1295 | 0, /* todo_flags_start */ |
1296 | 0, /* todo_flags_finish */ |
1297 | }; |
1298 | |
1299 | class pass_tail_calls : public gimple_opt_pass |
1300 | { |
1301 | public: |
1302 | pass_tail_calls (gcc::context *ctxt) |
1303 | : gimple_opt_pass (pass_data_tail_calls, ctxt) |
1304 | {} |
1305 | |
1306 | /* opt_pass methods: */ |
1307 | bool gate (function *) final override { return gate_tail_calls (); } |
1308 | unsigned int execute (function *) final override |
1309 | { |
1310 | return execute_tail_calls (); |
1311 | } |
1312 | |
1313 | }; // class pass_tail_calls |
1314 | |
1315 | } // anon namespace |
1316 | |
1317 | gimple_opt_pass * |
1318 | make_pass_tail_calls (gcc::context *ctxt) |
1319 | { |
1320 | return new pass_tail_calls (ctxt); |
1321 | } |