File: build/gcc/cfgloop.cc
Warning: line 290, column 5: Called C++ object pointer is null
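The flagged expression is the loop->superloops->quick_push (ploop) call at source line 290 in establish_preds below; the report evidently assumes that the preceding vec_alloc (loop->superloops, depth) can leave loop->superloops null.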
1 | /* Natural loop discovery code for GNU compiler. | ||||
2 | Copyright (C) 2000-2023 Free Software Foundation, Inc. | ||||
3 | |||||
4 | This file is part of GCC. | ||||
5 | |||||
6 | GCC is free software; you can redistribute it and/or modify it under | ||||
7 | the terms of the GNU General Public License as published by the Free | ||||
8 | Software Foundation; either version 3, or (at your option) any later | ||||
9 | version. | ||||
10 | |||||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | ||||
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | ||||
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | ||||
14 | for more details. | ||||
15 | |||||
16 | You should have received a copy of the GNU General Public License | ||||
17 | along with GCC; see the file COPYING3. If not see | ||||
18 | <http://www.gnu.org/licenses/>. */ | ||||
19 | |||||
20 | #include "config.h" | ||||
21 | #include "system.h" | ||||
22 | #include "coretypes.h" | ||||
23 | #include "backend.h" | ||||
24 | #include "rtl.h" | ||||
25 | #include "tree.h" | ||||
26 | #include "gimple.h" | ||||
27 | #include "cfghooks.h" | ||||
28 | #include "gimple-ssa.h" | ||||
29 | #include "diagnostic-core.h" | ||||
30 | #include "cfganal.h" | ||||
31 | #include "cfgloop.h" | ||||
32 | #include "gimple-iterator.h" | ||||
33 | #include "dumpfile.h" | ||||
34 | #include "tree-ssa.h" | ||||
35 | #include "tree-pretty-print.h" | ||||
36 | |||||
37 | static void flow_loops_cfg_dump (FILE *); | ||||
38 | |||||
39 | /* Dump loop related CFG information. */ | ||||
40 | |||||
41 | static void | ||||
42 | flow_loops_cfg_dump (FILE *file) | ||||
43 | { | ||||
44 | basic_block bb; | ||||
45 | |||||
46 | if (!file) | ||||
47 | return; | ||||
48 | |||||
49 | FOR_EACH_BB_FN (bb, cfun)
50 | { | ||||
51 | edge succ; | ||||
52 | edge_iterator ei; | ||||
53 | |||||
54 | fprintf (file, ";; %d succs { ", bb->index); | ||||
55 | FOR_EACH_EDGE (succ, ei, bb->succs)
56 | fprintf (file, "%d ", succ->dest->index); | ||||
57 | fprintf (file, "}\n"); | ||||
58 | } | ||||
59 | } | ||||
60 | |||||
61 | /* Return nonzero if the nodes of LOOP are a subset of OUTER. */ | ||||
62 | |||||
63 | bool | ||||
64 | flow_loop_nested_p (const class loop *outer, const class loop *loop) | ||||
65 | { | ||||
66 | unsigned odepth = loop_depth (outer); | ||||
67 | |||||
68 | return (loop_depth (loop) > odepth | ||||
69 | && (*loop->superloops)[odepth] == outer); | ||||
70 | } | ||||
71 | |||||
72 | /* Returns the superloop of LOOP at depth DEPTH (indexed from zero); if
73 | DEPTH equals the depth of LOOP, LOOP itself is returned. */
74 | |||||
75 | class loop * | ||||
76 | superloop_at_depth (class loop *loop, unsigned depth) | ||||
77 | { | ||||
78 | unsigned ldepth = loop_depth (loop); | ||||
79 | |||||
80 | gcc_assert (depth <= ldepth);
81 | |||||
82 | if (depth == ldepth) | ||||
83 | return loop; | ||||
84 | |||||
85 | return (*loop->superloops)[depth]; | ||||
86 | } | ||||
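/* Note: loop->superloops lists the ancestors of a loop ordered from the
   loop-tree root (depth 0) up to the immediate parent, so its length equals
   loop_depth (loop); flow_loop_nested_p and superloop_at_depth above both
   rely on this invariant, which establish_preds below maintains. */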
87 | |||||
88 | /* Returns the list of the latch edges of LOOP. */ | ||||
89 | |||||
90 | static vec<edge> | ||||
91 | get_loop_latch_edges (const class loop *loop) | ||||
92 | { | ||||
93 | edge_iterator ei; | ||||
94 | edge e; | ||||
95 | vec<edge> ret = vNULL; | ||||
96 | |||||
97 | FOR_EACH_EDGE (e, ei, loop->header->preds)
98 | { | ||||
99 | if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header)) | ||||
100 | ret.safe_push (e); | ||||
101 | } | ||||
102 | |||||
103 | return ret; | ||||
104 | } | ||||
105 | |||||
106 | /* Dump the loop information specified by LOOP to the stream FILE | ||||
107 | using auxiliary dump callback function LOOP_DUMP_AUX if non null. */ | ||||
108 | |||||
109 | void | ||||
110 | flow_loop_dump (const class loop *loop, FILE *file, | ||||
111 | void (*loop_dump_aux) (const class loop *, FILE *, int), | ||||
112 | int verbose) | ||||
113 | { | ||||
114 | basic_block *bbs; | ||||
115 | unsigned i; | ||||
116 | vec<edge> latches; | ||||
117 | edge e; | ||||
118 | |||||
119 | if (! loop || ! loop->header) | ||||
120 | return; | ||||
121 | |||||
122 | fprintf (file, ";;\n;; Loop %d\n", loop->num); | ||||
123 | |||||
124 | fprintf (file, ";; header %d, ", loop->header->index); | ||||
125 | if (loop->latch) | ||||
126 | fprintf (file, "latch %d\n", loop->latch->index); | ||||
127 | else | ||||
128 | { | ||||
129 | fprintf (file, "multiple latches:"); | ||||
130 | latches = get_loop_latch_edges (loop); | ||||
131 | FOR_EACH_VEC_ELT (latches, i, e)
132 | fprintf (file, " %d", e->src->index); | ||||
133 | latches.release (); | ||||
134 | fprintf (file, "\n"); | ||||
135 | } | ||||
136 | |||||
137 | fprintf (file, ";; depth %d, outer %ld\n", | ||||
138 | loop_depth (loop), (long) (loop_outer (loop) | ||||
139 | ? loop_outer (loop)->num : -1)); | ||||
140 | |||||
141 | if (loop->latch) | ||||
142 | { | ||||
143 | bool read_profile_p; | ||||
144 | gcov_type nit = expected_loop_iterations_unbounded (loop, &read_profile_p); | ||||
145 | if (read_profile_p && !loop->any_estimate) | ||||
146 | fprintf (file, ";; profile-based iteration count: %" PRIu64 "\n",
147 | (uint64_t) nit); | ||||
148 | } | ||||
149 | |||||
150 | fprintf (file, ";; nodes:"); | ||||
151 | bbs = get_loop_body (loop); | ||||
152 | for (i = 0; i < loop->num_nodes; i++) | ||||
153 | fprintf (file, " %d", bbs[i]->index); | ||||
154 | free (bbs); | ||||
155 | fprintf (file, "\n"); | ||||
156 | |||||
157 | if (loop_dump_aux) | ||||
158 | loop_dump_aux (loop, file, verbose); | ||||
159 | } | ||||
160 | |||||
161 | /* Dump the loop information about loops to the stream FILE, | ||||
162 | using auxiliary dump callback function LOOP_DUMP_AUX if non null. */ | ||||
163 | |||||
164 | void | ||||
165 | flow_loops_dump (FILE *file, void (*loop_dump_aux) (const class loop *, FILE *, int), int verbose) | ||||
166 | { | ||||
167 | if (!current_loops || ! file)
168 | return; | ||||
169 | |||||
170 | fprintf (file, ";; %d loops found\n", number_of_loops (cfun));
171 | |||||
172 | for (auto loop : loops_list (cfun, LI_INCLUDE_ROOT))
173 | { | ||||
174 | flow_loop_dump (loop, file, loop_dump_aux, verbose); | ||||
175 | } | ||||
176 | |||||
177 | if (verbose) | ||||
178 | flow_loops_cfg_dump (file); | ||||
179 | } | ||||
180 | |||||
181 | /* Free data allocated for LOOP. */ | ||||
182 | |||||
183 | void | ||||
184 | flow_loop_free (class loop *loop) | ||||
185 | { | ||||
186 | struct loop_exit *exit, *next; | ||||
187 | |||||
188 | vec_free (loop->superloops); | ||||
189 | |||||
190 | /* Break the list of the loop exit records. They will be freed when the | ||||
191 | corresponding edge is rescanned or removed, and this avoids | ||||
192 | accessing the (already released) head of the list stored in the | ||||
193 | loop structure. */ | ||||
194 | for (exit = loop->exits->next; exit != loop->exits; exit = next) | ||||
195 | { | ||||
196 | next = exit->next; | ||||
197 | exit->next = exit; | ||||
198 | exit->prev = exit; | ||||
199 | } | ||||
200 | |||||
201 | ggc_free (loop->exits); | ||||
202 | ggc_free (loop); | ||||
203 | } | ||||
204 | |||||
205 | /* Free all the memory allocated for LOOPS. */ | ||||
206 | |||||
207 | void | ||||
208 | flow_loops_free (struct loops *loops) | ||||
209 | { | ||||
210 | if (loops->larray) | ||||
211 | { | ||||
212 | unsigned i; | ||||
213 | loop_p loop; | ||||
214 | |||||
215 | /* Free the loop descriptors. */ | ||||
216 | FOR_EACH_VEC_SAFE_ELT (loops->larray, i, loop)
217 | { | ||||
218 | if (!loop) | ||||
219 | continue; | ||||
220 | |||||
221 | flow_loop_free (loop); | ||||
222 | } | ||||
223 | |||||
224 | vec_free (loops->larray); | ||||
225 | } | ||||
226 | } | ||||
227 | |||||
228 | /* Find the nodes contained within the LOOP with header HEADER. | ||||
229 | Return the number of nodes within the loop. */ | ||||
230 | |||||
231 | int | ||||
232 | flow_loop_nodes_find (basic_block header, class loop *loop) | ||||
233 | { | ||||
234 | vec<basic_block> stack = vNULL; | ||||
235 | int num_nodes = 1; | ||||
236 | edge latch; | ||||
237 | edge_iterator latch_ei; | ||||
238 | |||||
239 | header->loop_father = loop; | ||||
240 | |||||
241 | FOR_EACH_EDGE (latch, latch_ei, loop->header->preds)
242 | { | ||||
243 | if (latch->src->loop_father == loop | ||||
244 | || !dominated_by_p (CDI_DOMINATORS, latch->src, loop->header)) | ||||
245 | continue; | ||||
246 | |||||
247 | num_nodes++; | ||||
248 | stack.safe_push (latch->src); | ||||
249 | latch->src->loop_father = loop; | ||||
250 | |||||
251 | while (!stack.is_empty ()) | ||||
252 | { | ||||
253 | basic_block node; | ||||
254 | edge e; | ||||
255 | edge_iterator ei; | ||||
256 | |||||
257 | node = stack.pop (); | ||||
258 | |||||
259 | FOR_EACH_EDGE (e, ei, node->preds)
260 | { | ||||
261 | basic_block ancestor = e->src; | ||||
262 | |||||
263 | if (ancestor->loop_father != loop) | ||||
264 | { | ||||
265 | ancestor->loop_father = loop; | ||||
266 | num_nodes++; | ||||
267 | stack.safe_push (ancestor); | ||||
268 | } | ||||
269 | } | ||||
270 | } | ||||
271 | } | ||||
272 | stack.release (); | ||||
273 | |||||
274 | return num_nodes; | ||||
275 | } | ||||
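/* Note: flow_loop_nodes_find above walks backwards from each latch edge over
   predecessor edges, claiming every block not yet owned by LOOP; the header
   was claimed first, so the walk stops there and each block is counted
   exactly once. */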
276 | |||||
277 | /* Records the vector of superloops of the loop LOOP, whose immediate | ||||
278 | superloop is FATHER. */ | ||||
279 | |||||
280 | static void | ||||
281 | establish_preds (class loop *loop, class loop *father) | ||||
282 | { | ||||
283 | loop_p ploop; | ||||
284 | unsigned depth = loop_depth (father) + 1; | ||||
285 | unsigned i; | ||||
286 | |||||
287 | loop->superloops = 0; | ||||
288 | vec_alloc (loop->superloops, depth); | ||||
289 | FOR_EACH_VEC_SAFE_ELT (father->superloops, i, ploop)
290 | loop->superloops->quick_push (ploop);
291 | loop->superloops->quick_push (father);
292 | |||||
293 | for (ploop = loop->inner; ploop; ploop = ploop->next) | ||||
294 | establish_preds (ploop, loop); | ||||
295 | } | ||||
296 | |||||
297 | /* Add LOOP to the loop hierarchy tree where FATHER is the father of the
298 | added loop. If LOOP has children, take care that their pred fields
299 | are initialized correctly. If AFTER is non-null, it is expected to
300 | be a pointer into FATHER's inner sibling list, and LOOP is added
301 | behind AFTER; otherwise LOOP is added in front of FATHER's
302 | siblings. */
303 | |||||
304 | void | ||||
305 | flow_loop_tree_node_add (class loop *father, class loop *loop, | ||||
306 | class loop *after) | ||||
307 | { | ||||
308 | if (after)
309 | { | ||||
310 | loop->next = after->next; | ||||
311 | after->next = loop; | ||||
312 | } | ||||
313 | else | ||||
314 | { | ||||
315 | loop->next = father->inner; | ||||
316 | father->inner = loop; | ||||
317 | } | ||||
318 | |||||
319 | establish_preds (loop, father); | ||||
320 | } | ||||
321 | |||||
322 | /* Remove LOOP from the loop hierarchy tree. */ | ||||
323 | |||||
324 | void | ||||
325 | flow_loop_tree_node_remove (class loop *loop) | ||||
326 | { | ||||
327 | class loop *prev, *father; | ||||
328 | |||||
329 | father = loop_outer (loop); | ||||
330 | |||||
331 | /* Remove loop from the list of sons. */ | ||||
332 | if (father->inner == loop) | ||||
333 | father->inner = loop->next; | ||||
334 | else | ||||
335 | { | ||||
336 | for (prev = father->inner; prev->next != loop; prev = prev->next) | ||||
337 | continue; | ||||
338 | prev->next = loop->next; | ||||
339 | } | ||||
340 | |||||
341 | loop->superloops = NULL;
342 | } | ||||
343 | |||||
344 | /* Allocates and returns new loop structure. */ | ||||
345 | |||||
346 | class loop * | ||||
347 | alloc_loop (void) | ||||
348 | { | ||||
349 | class loop *loop = ggc_cleared_alloc<class loop> (); | ||||
350 | |||||
351 | loop->exits = ggc_cleared_alloc<loop_exit> (); | ||||
352 | loop->exits->next = loop->exits->prev = loop->exits; | ||||
353 | loop->can_be_parallel = false; | ||||
354 | loop->constraints = 0; | ||||
355 | loop->nb_iterations_upper_bound = 0; | ||||
356 | loop->nb_iterations_likely_upper_bound = 0; | ||||
357 | loop->nb_iterations_estimate = 0; | ||||
358 | return loop; | ||||
359 | } | ||||
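/* Note: the exits field allocated above is a self-linked sentinel, so each
   loop's exit list is a circular doubly-linked list headed by loop->exits;
   flow_loop_free above and rescan_loop_exit below rely on this layout. */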
360 | |||||
361 | /* Initializes loops structure LOOPS, reserving place for NUM_LOOPS loops | ||||
362 | (including the root of the loop tree). */ | ||||
363 | |||||
364 | void | ||||
365 | init_loops_structure (struct function *fn, | ||||
366 | struct loops *loops, unsigned num_loops) | ||||
367 | { | ||||
368 | class loop *root; | ||||
369 | |||||
370 | memset (loops, 0, sizeof *loops); | ||||
371 | vec_alloc (loops->larray, num_loops); | ||||
372 | |||||
373 | /* Dummy loop containing whole function. */ | ||||
374 | root = alloc_loop (); | ||||
375 | root->num_nodes = n_basic_blocks_for_fn (fn);
376 | root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
377 | root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
378 | ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
379 | EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
380 | |||||
381 | loops->larray->quick_push (root); | ||||
382 | loops->tree_root = root; | ||||
383 | } | ||||
384 | |||||
385 | /* Returns whether HEADER is a loop header. */ | ||||
386 | |||||
387 | bool | ||||
388 | bb_loop_header_p (basic_block header) | ||||
389 | { | ||||
390 | edge_iterator ei; | ||||
391 | edge e; | ||||
392 | |||||
393 | /* If we have an abnormal predecessor, do not consider the | ||||
394 | loop (not worth the problems). */ | ||||
395 | if (bb_has_abnormal_pred (header)) | ||||
396 | return false; | ||||
397 | |||||
398 | /* Look for back edges where a predecessor is dominated | ||||
399 | by this block. A natural loop has a single entry | ||||
400 | node (header) that dominates all the nodes in the | ||||
401 | loop. It also has single back edge to the header | ||||
402 | from a latch node. */ | ||||
403 | FOR_EACH_EDGE (e, ei, header->preds)
404 | { | ||||
405 | basic_block latch = e->src; | ||||
406 | if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
407 | && dominated_by_p (CDI_DOMINATORS, latch, header)) | ||||
408 | return true; | ||||
409 | } | ||||
410 | |||||
411 | return false; | ||||
412 | } | ||||
413 | |||||
414 | /* Find all the natural loops in the function and save in LOOPS structure and | ||||
415 | recalculate loop_father information in basic block structures. | ||||
416 | If LOOPS is non-NULL then the loop structures for already recorded loops | ||||
417 | will be re-used and their number will not change. We assume that no | ||||
418 | stale loops exist in LOOPS. | ||||
419 | When LOOPS is NULL it is allocated and re-built from scratch. | ||||
420 | Return the built LOOPS structure. */ | ||||
421 | |||||
422 | struct loops * | ||||
423 | flow_loops_find (struct loops *loops) | ||||
424 | { | ||||
425 | bool from_scratch = (loops == NULL);
426 | int *rc_order; | ||||
427 | int b; | ||||
428 | unsigned i; | ||||
429 | |||||
430 | /* Ensure that the dominators are computed. */ | ||||
431 | calculate_dominance_info (CDI_DOMINATORS); | ||||
432 | |||||
433 | if (!loops)
434 | { | ||||
435 | loops = ggc_cleared_alloc<struct loops> (); | ||||
436 | init_loops_structure (cfun, loops, 1);
437 | } | ||||
438 | |||||
439 | /* Ensure that loop exits were released. */ | ||||
440 | gcc_assert (loops->exits == NULL);
441 | |||||
442 | /* Taking care of this degenerate case makes the rest of | ||||
443 | this code simpler. */ | ||||
444 | if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
445 | return loops; | ||||
446 | |||||
447 | /* The root loop node contains all basic-blocks. */ | ||||
448 | loops->tree_root->num_nodes = n_basic_blocks_for_fn (cfun);
449 | |||||
450 | /* Compute depth first search order of the CFG so that outer | ||||
451 | natural loops will be found before inner natural loops. */ | ||||
452 | rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
453 | pre_and_rev_post_order_compute (NULL, rc_order, false);
454 | |||||
455 | /* Gather all loop headers in reverse completion order and allocate | ||||
456 | loop structures for loops that are not already present. */ | ||||
457 | auto_vec<loop_p> larray (loops->larray->length ()); | ||||
458 | for (b = 0; b < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; b++)
459 | { | ||||
460 | basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
461 | if (bb_loop_header_p (header)) | ||||
462 | { | ||||
463 | class loop *loop; | ||||
464 | |||||
465 | /* The current active loop tree has valid loop-fathers for | ||||
466 | header blocks. */ | ||||
467 | if (!from_scratch | ||||
468 | && header->loop_father->header == header) | ||||
469 | { | ||||
470 | loop = header->loop_father; | ||||
471 | /* If we found an existing loop remove it from the | ||||
472 | loop tree. It is going to be inserted again | ||||
473 | below. */ | ||||
474 | flow_loop_tree_node_remove (loop); | ||||
475 | } | ||||
476 | else | ||||
477 | { | ||||
478 | /* Otherwise allocate a new loop structure for the loop. */ | ||||
479 | loop = alloc_loop (); | ||||
480 | /* ??? We could re-use unused loop slots here. */ | ||||
481 | loop->num = loops->larray->length (); | ||||
482 | vec_safe_push (loops->larray, loop); | ||||
483 | loop->header = header; | ||||
484 | |||||
485 | if (!from_scratch | ||||
486 | && dump_file && (dump_flags & TDF_DETAILS)) | ||||
487 | fprintf (dump_file, "flow_loops_find: discovered new " | ||||
488 | "loop %d with header %d\n", | ||||
489 | loop->num, header->index); | ||||
490 | } | ||||
491 | /* Reset latch, we recompute it below. */ | ||||
492 | loop->latch = NULL;
493 | larray.safe_push (loop); | ||||
494 | } | ||||
495 | |||||
496 | /* Make blocks part of the loop root node at start. */ | ||||
497 | header->loop_father = loops->tree_root; | ||||
498 | } | ||||
499 | |||||
500 | free (rc_order); | ||||
501 | |||||
502 | /* Now iterate over the loops found, insert them into the loop tree | ||||
503 | and assign basic-block ownership. */ | ||||
504 | for (i = 0; i < larray.length (); ++i) | ||||
505 | { | ||||
506 | class loop *loop = larray[i]; | ||||
507 | basic_block header = loop->header; | ||||
508 | edge_iterator ei; | ||||
509 | edge e; | ||||
510 | |||||
511 | flow_loop_tree_node_add (header->loop_father, loop); | ||||
512 | loop->num_nodes = flow_loop_nodes_find (loop->header, loop); | ||||
513 | |||||
514 | /* Look for the latch for this header block, if it has just a | ||||
515 | single one. */ | ||||
516 | FOR_EACH_EDGE (e, ei, header->preds)
517 | { | ||||
518 | basic_block latch = e->src; | ||||
519 | |||||
520 | if (flow_bb_inside_loop_p (loop, latch)) | ||||
521 | { | ||||
522 | if (loop->latch != NULL)
523 | { | ||||
524 | /* More than one latch edge. */ | ||||
525 | loop->latch = NULL;
526 | break; | ||||
527 | } | ||||
528 | loop->latch = latch; | ||||
529 | } | ||||
530 | } | ||||
531 | } | ||||
532 | |||||
533 | return loops; | ||||
534 | } | ||||
535 | |||||
536 | /* qsort helper for sort_sibling_loops. */ | ||||
537 | |||||
538 | static int *sort_sibling_loops_cmp_rpo; | ||||
539 | static int | ||||
540 | sort_sibling_loops_cmp (const void *la_, const void *lb_) | ||||
541 | { | ||||
542 | const class loop *la = *(const class loop * const *)la_; | ||||
543 | const class loop *lb = *(const class loop * const *)lb_; | ||||
544 | return (sort_sibling_loops_cmp_rpo[la->header->index] | ||||
545 | - sort_sibling_loops_cmp_rpo[lb->header->index]); | ||||
546 | } | ||||
547 | |||||
548 | /* Sort sibling loops in RPO order. */ | ||||
549 | |||||
550 | void | ||||
551 | sort_sibling_loops (function *fn) | ||||
552 | { | ||||
553 | /* Match flow_loops_find in the order we sort sibling loops. */ | ||||
554 | sort_sibling_loops_cmp_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
555 | int *rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
556 | pre_and_rev_post_order_compute_fn (fn, NULL, rc_order, false);
557 | for (int i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; ++i)
558 | sort_sibling_loops_cmp_rpo[rc_order[i]] = i; | ||||
559 | free (rc_order); | ||||
560 | |||||
561 | auto_vec<loop_p, 3> siblings; | ||||
562 | for (auto loop : loops_list (fn, LI_INCLUDE_ROOT)) | ||||
563 | if (loop->inner && loop->inner->next) | ||||
564 | { | ||||
565 | loop_p sibling = loop->inner; | ||||
566 | do | ||||
567 | { | ||||
568 | siblings.safe_push (sibling); | ||||
569 | sibling = sibling->next; | ||||
570 | } | ||||
571 | while (sibling); | ||||
572 | siblings.qsort (sort_sibling_loops_cmp);
573 | loop_p *siblingp = &loop->inner; | ||||
574 | for (unsigned i = 0; i < siblings.length (); ++i) | ||||
575 | { | ||||
576 | *siblingp = siblings[i]; | ||||
577 | siblingp = &(*siblingp)->next; | ||||
578 | } | ||||
579 | *siblingp = NULL;
580 | siblings.truncate (0); | ||||
581 | } | ||||
582 | |||||
583 | free (sort_sibling_loops_cmp_rpo); | ||||
584 | sort_sibling_loops_cmp_rpo = NULL;
585 | } | ||||
586 | |||||
587 | /* Ratio of frequencies of edges so that one of more latch edges is | ||||
588 | considered to belong to inner loop with same header. */ | ||||
589 | #define HEAVY_EDGE_RATIO 8
590 | |||||
591 | /* Minimum number of samples for that we apply | ||||
592 | find_subloop_latch_edge_by_profile heuristics. */ | ||||
593 | #define HEAVY_EDGE_MIN_SAMPLES 10
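/* Concretely: with the defaults above, find_subloop_latch_edge_by_profile
   below only returns an edge when the total latch count comes from more than
   10 IPA samples and the remaining latch edges together carry at most 1/8 of
   that total, i.e. (tcount - mcount) * HEAVY_EDGE_RATIO <= tcount. */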
594 | |||||
595 | /* If the profile info is available, finds an edge in LATCHES that is much
596 | more frequent than the remaining edges. Returns such an edge, or NULL if we do
597 | not find one. | ||||
598 | |||||
599 | We do not use guessed profile here, only the measured one. The guessed | ||||
600 | profile is usually too flat and unreliable for this (and it is mostly based | ||||
601 | on the loop structure of the program, so it does not make much sense to | ||||
602 | derive the loop structure from it). */ | ||||
603 | |||||
604 | static edge | ||||
605 | find_subloop_latch_edge_by_profile (vec<edge> latches) | ||||
606 | { | ||||
607 | unsigned i; | ||||
608 | edge e, me = NULL;
609 | profile_count mcount = profile_count::zero (), tcount = profile_count::zero (); | ||||
610 | |||||
611 | FOR_EACH_VEC_ELT (latches, i, e)
612 | {
613 | if (e->count () > mcount)
614 | {
615 | me = e;
616 | mcount = e->count ();
617 | }
618 | tcount += e->count ();
619 | }
620 |
621 | if (!tcount.initialized_p () || !(tcount.ipa () > HEAVY_EDGE_MIN_SAMPLES)
622 | || (tcount - mcount) * HEAVY_EDGE_RATIO > tcount)
623 | return NULL;
624 | |||||
625 | if (dump_file) | ||||
626 | fprintf (dump_file, | ||||
627 | "Found latch edge %d -> %d using profile information.\n", | ||||
628 | me->src->index, me->dest->index); | ||||
629 | return me; | ||||
630 | } | ||||
631 | |||||
632 | /* Among LATCHES, guesses a latch edge of LOOP corresponding to subloop, based | ||||
633 | on the structure of induction variables. Returns this edge, or NULL if we | ||||
634 | do not find any. | ||||
635 | |||||
636 | We are quite conservative, and look just for an obvious simple innermost | ||||
637 | loop (which is the case where we would lose the most performance by not | ||||
638 | disambiguating the loop). More precisely, we look for the following | ||||
639 | situation: The source of the chosen latch edge dominates sources of all | ||||
640 | the other latch edges. Additionally, the header does not contain a phi node | ||||
641 | such that the argument from the chosen edge is equal to the argument from | ||||
642 | another edge. */ | ||||
643 | |||||
644 | static edge | ||||
645 | find_subloop_latch_edge_by_ivs (class loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
646 | { | ||||
647 | edge e, latch = latches[0]; | ||||
648 | unsigned i; | ||||
649 | gphi *phi; | ||||
650 | gphi_iterator psi; | ||||
651 | tree lop; | ||||
652 | basic_block bb; | ||||
653 | |||||
654 | /* Find the candidate for the latch edge. */ | ||||
655 | for (i = 1; latches.iterate (i, &e); i++) | ||||
656 | if (dominated_by_p (CDI_DOMINATORS, latch->src, e->src)) | ||||
657 | latch = e; | ||||
658 | |||||
659 | /* Verify that it dominates all the latch edges. */ | ||||
660 | FOR_EACH_VEC_ELT (latches, i, e)
661 | if (!dominated_by_p (CDI_DOMINATORS, e->src, latch->src))
662 | return NULL;
663 | |||||
664 | /* Check for a phi node that would deny that this is a latch edge of | ||||
665 | a subloop. */ | ||||
666 | for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi)) | ||||
667 | { | ||||
668 | phi = psi.phi (); | ||||
669 | lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);
670 | |||||
671 | /* Ignore the values that are not changed inside the subloop. */ | ||||
672 | if (TREE_CODE (lop) != SSA_NAME
673 | || SSA_NAME_DEF_STMT (lop) == phi)
674 | continue;
675 | bb = gimple_bb (SSA_NAME_DEF_STMT (lop));
676 | if (!bb || !flow_bb_inside_loop_p (loop, bb)) | ||||
677 | continue; | ||||
678 | |||||
679 | FOR_EACH_VEC_ELT (latches, i, e)
680 | if (e != latch
681 | && PHI_ARG_DEF_FROM_EDGE (phi, e) == lop)
682 | return NULL;
683 | } | ||||
684 | |||||
685 | if (dump_file) | ||||
686 | fprintf (dump_file, | ||||
687 | "Found latch edge %d -> %d using iv structure.\n", | ||||
688 | latch->src->index, latch->dest->index); | ||||
689 | return latch; | ||||
690 | } | ||||
691 | |||||
692 | /* If we can determine that one of the several latch edges of LOOP behaves | ||||
693 | as a latch edge of a separate subloop, returns this edge. Otherwise | ||||
694 | returns NULL. */ | ||||
695 | |||||
696 | static edge | ||||
697 | find_subloop_latch_edge (class loop *loop) | ||||
698 | { | ||||
699 | vec<edge> latches = get_loop_latch_edges (loop); | ||||
700 | edge latch = NULL;
701 | |||||
702 | if (latches.length () > 1) | ||||
703 | { | ||||
704 | latch = find_subloop_latch_edge_by_profile (latches); | ||||
705 | |||||
706 | if (!latch | ||||
707 | /* We consider ivs to guess the latch edge only in SSA. Perhaps we | ||||
708 | should use cfghook for this, but it is hard to imagine it would | ||||
709 | be useful elsewhere. */ | ||||
710 | && current_ir_type () == IR_GIMPLE) | ||||
711 | latch = find_subloop_latch_edge_by_ivs (loop, latches); | ||||
712 | } | ||||
713 | |||||
714 | latches.release (); | ||||
715 | return latch; | ||||
716 | } | ||||
717 | |||||
718 | /* Callback for make_forwarder_block. Returns true if the edge E is marked | ||||
719 | in the set MFB_REIS_SET. */ | ||||
720 | |||||
721 | static hash_set<edge> *mfb_reis_set; | ||||
722 | static bool | ||||
723 | mfb_redirect_edges_in_set (edge e) | ||||
724 | { | ||||
725 | return mfb_reis_set->contains (e); | ||||
726 | } | ||||
727 | |||||
728 | /* Creates a subloop of LOOP with latch edge LATCH. */ | ||||
729 | |||||
730 | static void | ||||
731 | form_subloop (class loop *loop, edge latch) | ||||
732 | { | ||||
733 | edge_iterator ei; | ||||
734 | edge e, new_entry; | ||||
735 | class loop *new_loop; | ||||
736 | |||||
737 | mfb_reis_set = new hash_set<edge>; | ||||
738 | FOR_EACH_EDGE (e, ei, loop->header->preds)
739 | { | ||||
740 | if (e != latch) | ||||
741 | mfb_reis_set->add (e); | ||||
742 | } | ||||
743 | new_entry = make_forwarder_block (loop->header, mfb_redirect_edges_in_set, | ||||
744 | NULL);
745 | delete mfb_reis_set; | ||||
746 | |||||
747 | loop->header = new_entry->src; | ||||
748 | |||||
749 | /* Find the blocks and subloops that belong to the new loop, and add it to | ||||
750 | the appropriate place in the loop tree. */ | ||||
751 | new_loop = alloc_loop (); | ||||
752 | new_loop->header = new_entry->dest; | ||||
753 | new_loop->latch = latch->src; | ||||
754 | add_loop (new_loop, loop); | ||||
755 | } | ||||
756 | |||||
757 | /* Make all the latch edges of LOOP go to a single forwarder block --
758 | a new latch of LOOP. */
759 | |||||
760 | static void | ||||
761 | merge_latch_edges (class loop *loop) | ||||
762 | { | ||||
763 | vec<edge> latches = get_loop_latch_edges (loop); | ||||
764 | edge latch, e; | ||||
765 | unsigned i; | ||||
766 | |||||
767 | gcc_assert (latches.length () > 0);
768 | |||||
769 | if (latches.length () == 1) | ||||
770 | loop->latch = latches[0]->src; | ||||
771 | else | ||||
772 | { | ||||
773 | if (dump_file) | ||||
774 | fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num); | ||||
775 | |||||
776 | mfb_reis_set = new hash_set<edge>; | ||||
777 | FOR_EACH_VEC_ELT (latches, i, e)
778 | mfb_reis_set->add (e); | ||||
779 | latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set, | ||||
780 | NULL);
781 | delete mfb_reis_set; | ||||
782 | |||||
783 | loop->header = latch->dest; | ||||
784 | loop->latch = latch->src; | ||||
785 | } | ||||
786 | |||||
787 | latches.release (); | ||||
788 | } | ||||
789 | |||||
790 | /* LOOP may have several latch edges. Transform it into (possibly several) | ||||
791 | loops with single latch edge. */ | ||||
792 | |||||
793 | static void | ||||
794 | disambiguate_multiple_latches (class loop *loop) | ||||
795 | { | ||||
796 | edge e; | ||||
797 | |||||
798 | /* We eliminate the multiple latches by splitting the header to the forwarder | ||||
799 | block F and the rest R, and redirecting the edges. There are two cases: | ||||
800 | |||||
801 | 1) If there is a latch edge E that corresponds to a subloop (we guess | ||||
802 | that based on profile -- if it is taken much more often than the | ||||
803 | remaining edges; and on trees, using the information about induction | ||||
804 | variables of the loops), we redirect E to R, all the remaining edges to | ||||
805 | F, then rescan the loops and try again for the outer loop. | ||||
806 | 2) If there is no such edge, we redirect all latch edges to F, and the | ||||
807 | entry edges to R, thus making F the single latch of the loop. */ | ||||
808 | |||||
809 | if (dump_file) | ||||
810 | fprintf (dump_file, "Disambiguating loop %d with multiple latches\n", | ||||
811 | loop->num); | ||||
812 | |||||
813 | /* During latch merging, we may need to redirect the entry edges to a new | ||||
814 | block. This would cause problems if the entry edge was the one from the | ||||
815 | entry block. To avoid having to handle this case specially, split | ||||
816 | such entry edge. */ | ||||
817 | e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);
818 | if (e) | ||||
819 | split_edge (e); | ||||
820 | |||||
821 | while (1) | ||||
822 | { | ||||
823 | e = find_subloop_latch_edge (loop); | ||||
824 | if (!e) | ||||
825 | break; | ||||
826 | |||||
827 | form_subloop (loop, e); | ||||
828 | } | ||||
829 | |||||
830 | merge_latch_edges (loop); | ||||
831 | } | ||||
832 | |||||
833 | /* Split loops with multiple latch edges. */ | ||||
834 | |||||
835 | void | ||||
836 | disambiguate_loops_with_multiple_latches (void) | ||||
837 | { | ||||
838 | for (auto loop : loops_list (cfun, 0))
839 | { | ||||
840 | if (!loop->latch) | ||||
841 | disambiguate_multiple_latches (loop); | ||||
842 | } | ||||
843 | } | ||||
844 | |||||
845 | /* Return nonzero if basic block BB belongs to LOOP. */ | ||||
846 | bool | ||||
847 | flow_bb_inside_loop_p (const class loop *loop, const_basic_block bb) | ||||
848 | { | ||||
849 | class loop *source_loop; | ||||
850 | |||||
851 | if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
852 | || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
853 | return 0; | ||||
854 | |||||
855 | source_loop = bb->loop_father; | ||||
856 | return loop == source_loop || flow_loop_nested_p (loop, source_loop); | ||||
857 | } | ||||
858 | |||||
859 | /* Enumeration predicate for get_loop_body_with_size. */ | ||||
860 | static bool | ||||
861 | glb_enum_p (const_basic_block bb, const void *glb_loop) | ||||
862 | { | ||||
863 | const class loop *const loop = (const class loop *) glb_loop; | ||||
864 | return (bb != loop->header | ||||
865 | && dominated_by_p (CDI_DOMINATORS, bb, loop->header)); | ||||
866 | } | ||||
867 | |||||
868 | /* Gets basic blocks of a LOOP. Header is the 0-th block, rest is in dfs | ||||
869 | order against direction of edges from latch. In particular, if
870 | header != latch, latch is the 1-st block. LOOP cannot be the fake | ||||
871 | loop tree root, and its size must be at most MAX_SIZE. The blocks | ||||
872 | in the LOOP body are stored to BODY, and the size of the LOOP is | ||||
873 | returned. */ | ||||
874 | |||||
875 | unsigned | ||||
876 | get_loop_body_with_size (const class loop *loop, basic_block *body, | ||||
877 | unsigned max_size) | ||||
878 | { | ||||
879 | return dfs_enumerate_from (loop->header, 1, glb_enum_p, | ||||
880 | body, max_size, loop); | ||||
881 | } | ||||
882 | |||||
883 | /* Gets basic blocks of a LOOP. Header is the 0-th block, rest is in dfs | ||||
884 | order against direction of edges from latch. In particular, if
885 | header != latch, latch is the 1-st block. */ | ||||
886 | |||||
887 | basic_block * | ||||
888 | get_loop_body (const class loop *loop) | ||||
889 | { | ||||
890 | basic_block *body, bb; | ||||
891 | unsigned tv = 0; | ||||
892 | |||||
893 | gcc_assert (loop->num_nodes);
894 |
895 | body = XNEWVEC (basic_block, loop->num_nodes);
896 |
897 | if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
898 | { | ||||
899 | /* There may be blocks unreachable from EXIT_BLOCK, hence we need to | ||||
900 | special-case the fake loop that contains the whole function. */ | ||||
901 | gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
902 | body[tv++] = loop->header; | ||||
903 | body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
904 | FOR_EACH_BB_FN (bb, cfun)
905 | body[tv++] = bb; | ||||
906 | } | ||||
907 | else | ||||
908 | tv = get_loop_body_with_size (loop, body, loop->num_nodes); | ||||
909 | |||||
910 | gcc_assert (tv == loop->num_nodes);
911 | return body; | ||||
912 | } | ||||
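/* Usage sketch (mirroring flow_loop_dump above; visit_bb is a hypothetical
   caller function): the returned array is heap-allocated and owned by the
   caller:
     basic_block *bbs = get_loop_body (loop);
     for (unsigned i = 0; i < loop->num_nodes; i++)
       visit_bb (bbs[i]);
     free (bbs); */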
913 | |||||
914 | /* Fills dominance descendants inside LOOP of the basic block BB into | ||||
915 | array TOVISIT from index *TV. */ | ||||
916 | |||||
917 | static void | ||||
918 | fill_sons_in_loop (const class loop *loop, basic_block bb, | ||||
919 | basic_block *tovisit, int *tv) | ||||
920 | { | ||||
921 | basic_block son, postpone = NULL;
922 | |||||
923 | tovisit[(*tv)++] = bb; | ||||
924 | for (son = first_dom_son (CDI_DOMINATORS, bb); | ||||
925 | son; | ||||
926 | son = next_dom_son (CDI_DOMINATORS, son)) | ||||
927 | { | ||||
928 | if (!flow_bb_inside_loop_p (loop, son)) | ||||
929 | continue; | ||||
930 | |||||
931 | if (dominated_by_p (CDI_DOMINATORS, loop->latch, son)) | ||||
932 | { | ||||
933 | postpone = son; | ||||
934 | continue; | ||||
935 | } | ||||
936 | fill_sons_in_loop (loop, son, tovisit, tv); | ||||
937 | } | ||||
938 | |||||
939 | if (postpone) | ||||
940 | fill_sons_in_loop (loop, postpone, tovisit, tv); | ||||
941 | } | ||||
942 | |||||
943 | /* Gets body of a LOOP (that must be different from the outermost loop) | ||||
944 | sorted by dominance relation. Additionally, if a basic block s dominates | ||||
945 | the latch, then only blocks dominated by s appear after it. */
946 | |||||
947 | basic_block * | ||||
948 | get_loop_body_in_dom_order (const class loop *loop) | ||||
949 | { | ||||
950 | basic_block *tovisit; | ||||
951 | int tv; | ||||
952 | |||||
953 | gcc_assert (loop->num_nodes);
954 |
955 | tovisit = XNEWVEC (basic_block, loop->num_nodes);
956 |
957 | gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
958 | |||||
959 | tv = 0; | ||||
960 | fill_sons_in_loop (loop, loop->header, tovisit, &tv); | ||||
961 | |||||
962 | gcc_assert (tv == (int) loop->num_nodes);
963 | |||||
964 | return tovisit; | ||||
965 | } | ||||
966 | |||||
967 | /* Gets body of a LOOP sorted via provided BB_COMPARATOR. */ | ||||
968 | |||||
969 | basic_block * | ||||
970 | get_loop_body_in_custom_order (const class loop *loop, | ||||
971 | int (*bb_comparator) (const void *, const void *)) | ||||
972 | { | ||||
973 | basic_block *bbs = get_loop_body (loop); | ||||
974 | |||||
975 | qsort (bbs, loop->num_nodes, sizeof (basic_block), bb_comparator);
976 | |||||
977 | return bbs; | ||||
978 | } | ||||
979 | |||||
980 | /* Same as above, but use gcc_sort_r instead of qsort. */ | ||||
981 | |||||
982 | basic_block * | ||||
983 | get_loop_body_in_custom_order (const class loop *loop, void *data, | ||||
984 | int (*bb_comparator) (const void *, const void *, void *)) | ||||
985 | { | ||||
986 | basic_block *bbs = get_loop_body (loop); | ||||
987 | |||||
988 | gcc_sort_r (bbs, loop->num_nodes, sizeof (basic_block), bb_comparator, data); | ||||
989 | |||||
990 | return bbs; | ||||
991 | } | ||||
992 | |||||
993 | /* Get body of a LOOP in breadth first sort order. */ | ||||
994 | |||||
995 | basic_block * | ||||
996 | get_loop_body_in_bfs_order (const class loop *loop) | ||||
997 | { | ||||
998 | basic_block *blocks; | ||||
999 | basic_block bb; | ||||
1000 | unsigned int i = 1; | ||||
1001 | unsigned int vc = 0; | ||||
1002 | |||||
1003 | gcc_assert (loop->num_nodes);
1004 | gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1005 |
1006 | blocks = XNEWVEC (basic_block, loop->num_nodes);
1007 | auto_bitmap visited; | ||||
1008 | blocks[0] = loop->header; | ||||
1009 | bitmap_set_bit (visited, loop->header->index); | ||||
1010 | while (i < loop->num_nodes) | ||||
1011 | { | ||||
1012 | edge e; | ||||
1013 | edge_iterator ei; | ||||
1014 | gcc_assert (i > vc);
1015 | bb = blocks[vc++]; | ||||
1016 | |||||
1017 | FOR_EACH_EDGE (e, ei, bb->succs)
1018 | { | ||||
1019 | if (flow_bb_inside_loop_p (loop, e->dest)) | ||||
1020 | { | ||||
1021 | /* This bb is now visited. */ | ||||
1022 | if (bitmap_set_bit (visited, e->dest->index)) | ||||
1023 | blocks[i++] = e->dest; | ||||
1024 | } | ||||
1025 | } | ||||
1026 | } | ||||
1027 | |||||
1028 | return blocks; | ||||
1029 | } | ||||
1030 | |||||
1031 | /* Hash function for struct loop_exit. */ | ||||
1032 | |||||
1033 | hashval_t | ||||
1034 | loop_exit_hasher::hash (loop_exit *exit) | ||||
1035 | { | ||||
1036 | return htab_hash_pointer (exit->e); | ||||
1037 | } | ||||
1038 | |||||
1039 | /* Equality function for struct loop_exit. Compares with edge. */ | ||||
1040 | |||||
1041 | bool | ||||
1042 | loop_exit_hasher::equal (loop_exit *exit, edge e) | ||||
1043 | { | ||||
1044 | return exit->e == e; | ||||
1045 | } | ||||
1046 | |||||
1047 | /* Frees the list of loop exit descriptions EX. */ | ||||
1048 | |||||
1049 | void | ||||
1050 | loop_exit_hasher::remove (loop_exit *exit) | ||||
1051 | { | ||||
1052 | loop_exit *next; | ||||
1053 | for (; exit; exit = next) | ||||
1054 | { | ||||
1055 | next = exit->next_e; | ||||
1056 | |||||
1057 | exit->next->prev = exit->prev; | ||||
1058 | exit->prev->next = exit->next; | ||||
1059 | |||||
1060 | ggc_free (exit); | ||||
1061 | } | ||||
1062 | } | ||||
1063 | |||||
1064 | /* Returns the list of records for E as an exit of a loop. */ | ||||
1065 | |||||
1066 | static struct loop_exit * | ||||
1067 | get_exit_descriptions (edge e) | ||||
1068 | { | ||||
1069 | return current_loops->exits->find_with_hash (e, htab_hash_pointer (e));
1070 | } | ||||
1071 | |||||
1072 | /* Updates the lists of loop exits in that E appears. | ||||
1073 | If REMOVED is true, E is being removed, and we | ||||
1074 | just remove it from the lists of exits. | ||||
1075 | If NEW_EDGE is true and E is not a loop exit, we | ||||
1076 | do not try to remove it from loop exit lists. */ | ||||
1077 | |||||
1078 | void | ||||
1079 | rescan_loop_exit (edge e, bool new_edge, bool removed) | ||||
1080 | { | ||||
1081 | struct loop_exit *exits = NULL, *exit;
1082 | class loop *aloop, *cloop; | ||||
1083 | |||||
1084 | if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS)) | ||||
1085 | return; | ||||
1086 | |||||
1087 | if (!removed | ||||
1088 | && e->src->loop_father != NULL
1089 | && e->dest->loop_father != NULL
1090 | && !flow_bb_inside_loop_p (e->src->loop_father, e->dest)) | ||||
1091 | { | ||||
1092 | cloop = find_common_loop (e->src->loop_father, e->dest->loop_father); | ||||
1093 | for (aloop = e->src->loop_father; | ||||
1094 | aloop != cloop; | ||||
1095 | aloop = loop_outer (aloop)) | ||||
1096 | { | ||||
1097 | exit = ggc_alloc<loop_exit> (); | ||||
1098 | exit->e = e; | ||||
1099 | |||||
1100 | exit->next = aloop->exits->next; | ||||
1101 | exit->prev = aloop->exits; | ||||
1102 | exit->next->prev = exit; | ||||
1103 | exit->prev->next = exit; | ||||
1104 | |||||
1105 | exit->next_e = exits; | ||||
1106 | exits = exit; | ||||
1107 | } | ||||
1108 | } | ||||
1109 | |||||
1110 | if (!exits && new_edge) | ||||
1111 | return; | ||||
1112 | |||||
1113 | loop_exit **slot | ||||
1114 | = current_loops->exits->find_slot_with_hash (e, htab_hash_pointer (e),
1115 | exits ? INSERT : NO_INSERT); | ||||
1116 | if (!slot) | ||||
1117 | return; | ||||
1118 | |||||
1119 | if (exits) | ||||
1120 | { | ||||
1121 | if (*slot) | ||||
1122 | loop_exit_hasher::remove (*slot); | ||||
1123 | *slot = exits; | ||||
1124 | } | ||||
1125 | else | ||||
1126 | current_loops->exits->clear_slot (slot);
1127 | } | ||||
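/* Note: each loop_exit record is linked twice: on the per-loop circular list
   through next/prev (headed by loop->exits) and on the per-edge chain through
   next_e, which is what the current_loops->exits hash table maps each edge
   to. */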
1128 | |||||
1129 | /* For each loop, record list of exit edges, and start maintaining these | ||||
1130 | lists. */ | ||||
1131 | |||||
1132 | void | ||||
1133 | record_loop_exits (void) | ||||
1134 | { | ||||
1135 | basic_block bb; | ||||
1136 | edge_iterator ei; | ||||
1137 | edge e; | ||||
1138 | |||||
1139 | if (!current_loops)
1140 | return; | ||||
1141 | |||||
1142 | if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS)) | ||||
1143 | return; | ||||
1144 | loops_state_set (LOOPS_HAVE_RECORDED_EXITS); | ||||
1145 | |||||
1146 | gcc_assert (current_loops->exits == NULL);
1147 | current_loops->exits
1148 | = hash_table<loop_exit_hasher>::create_ggc (2 * number_of_loops (cfun));
1149 |
1150 | FOR_EACH_BB_FN (bb, cfun)
1151 | { | ||||
1152 | FOR_EACH_EDGE (e, ei, bb->succs)
1153 | { | ||||
1154 | rescan_loop_exit (e, true, false); | ||||
1155 | } | ||||
1156 | } | ||||
1157 | } | ||||
1158 | |||||
1159 | /* Dumps information about the exit in *SLOT to FILE. | ||||
1160 | Callback for htab_traverse. */ | ||||
1161 | |||||
1162 | int | ||||
1163 | dump_recorded_exit (loop_exit **slot, FILE *file) | ||||
1164 | { | ||||
1165 | struct loop_exit *exit = *slot; | ||||
1166 | unsigned n = 0; | ||||
1167 | edge e = exit->e; | ||||
1168 | |||||
1169 | for (; exit != NULL; exit = exit->next_e)
1170 | n++; | ||||
1171 | |||||
1172 | fprintf (file, "Edge %d->%d exits %u loops\n", | ||||
1173 | e->src->index, e->dest->index, n); | ||||
1174 | |||||
1175 | return 1; | ||||
1176 | } | ||||
1177 | |||||
1178 | /* Dumps the recorded exits of loops to FILE. */ | ||||
1179 | |||||
1180 | extern void dump_recorded_exits (FILE *); | ||||
1181 | void | ||||
1182 | dump_recorded_exits (FILE *file) | ||||
1183 | { | ||||
1184 | if (!current_loops->exits)
1185 | return;
1186 | current_loops->exits->traverse<FILE *, dump_recorded_exit> (file);
1187 | } | ||||
1188 | |||||
1189 | /* Releases lists of loop exits. */ | ||||
1190 | |||||
1191 | void | ||||
1192 | release_recorded_exits (function *fn) | ||||
1193 | { | ||||
1194 | gcc_assert (loops_state_satisfies_p (fn, LOOPS_HAVE_RECORDED_EXITS));
1195 | loops_for_fn (fn)->exits->empty ();
1196 | loops_for_fn (fn)->exits = NULL;
1197 | loops_state_clear (fn, LOOPS_HAVE_RECORDED_EXITS); | ||||
1198 | } | ||||
1199 | |||||
1200 | /* Returns the list of the exit edges of a LOOP. */ | ||||
1201 | |||||
1202 | auto_vec<edge> | ||||
1203 | get_loop_exit_edges (const class loop *loop, basic_block *body) | ||||
1204 | { | ||||
1205 | auto_vec<edge> edges; | ||||
1206 | edge e; | ||||
1207 | unsigned i; | ||||
1208 | edge_iterator ei; | ||||
1209 | struct loop_exit *exit; | ||||
1210 | |||||
1211 | gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1212 | |||||
1213 | /* If we maintain the lists of exits, use them. Otherwise we must | ||||
1214 | scan the body of the loop. */ | ||||
1215 | if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS)) | ||||
1216 | { | ||||
1217 | for (exit = loop->exits->next; exit->e; exit = exit->next) | ||||
1218 | edges.safe_push (exit->e); | ||||
1219 | } | ||||
1220 | else | ||||
1221 | { | ||||
1222 | bool body_from_caller = true; | ||||
1223 | if (!body) | ||||
1224 | { | ||||
1225 | body = get_loop_body (loop); | ||||
1226 | body_from_caller = false; | ||||
1227 | } | ||||
1228 | for (i = 0; i < loop->num_nodes; i++) | ||||
1229 | FOR_EACH_EDGE (e, ei, body[i]->succs)
1230 | { | ||||
1231 | if (!flow_bb_inside_loop_p (loop, e->dest)) | ||||
1232 | edges.safe_push (e); | ||||
1233 | } | ||||
1234 | if (!body_from_caller) | ||||
1235 | free (body); | ||||
1236 | } | ||||
1237 | |||||
1238 | return edges; | ||||
1239 | } | ||||
1240 | |||||
1241 | /* Counts the number of conditional branches inside LOOP. */ | ||||
1242 | |||||
1243 | unsigned | ||||
1244 | num_loop_branches (const class loop *loop) | ||||
1245 | { | ||||
1246 | unsigned i, n; | ||||
1247 | basic_block * body; | ||||
1248 | |||||
1249 | gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1250 | |||||
1251 | body = get_loop_body (loop); | ||||
1252 | n = 0; | ||||
1253 | for (i = 0; i < loop->num_nodes; i++) | ||||
1254 | if (EDGE_COUNT (body[i]->succs) >= 2)
1255 | n++; | ||||
1256 | free (body); | ||||
1257 | |||||
1258 | return n; | ||||
1259 | } | ||||
1260 | |||||
1261 | /* Adds basic block BB to LOOP. */ | ||||
1262 | void | ||||
1263 | add_bb_to_loop (basic_block bb, class loop *loop) | ||||
1264 | { | ||||
1265 | unsigned i; | ||||
1266 | loop_p ploop; | ||||
1267 | edge_iterator ei; | ||||
1268 | edge e; | ||||
1269 | |||||
1270 | gcc_assert (bb->loop_father == NULL);
1271 | bb->loop_father = loop; | ||||
1272 | loop->num_nodes++; | ||||
1273 | FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
1274 | ploop->num_nodes++; | ||||
1275 | |||||
1276 | FOR_EACH_EDGE (e, ei, bb->succs)
1277 | { | ||||
1278 | rescan_loop_exit (e, true, false); | ||||
1279 | } | ||||
1280 | FOR_EACH_EDGE (e, ei, bb->preds)
1281 | { | ||||
1282 | rescan_loop_exit (e, true, false); | ||||
1283 | } | ||||
1284 | } | ||||
1285 | |||||
1286 | /* Remove basic block BB from loops. */ | ||||
1287 | void | ||||
1288 | remove_bb_from_loops (basic_block bb) | ||||
1289 | { | ||||
1290 | unsigned i; | ||||
1291 | class loop *loop = bb->loop_father; | ||||
1292 | loop_p ploop; | ||||
1293 | edge_iterator ei; | ||||
1294 | edge e; | ||||
1295 | |||||
1296 | gcc_assert (loop != NULL);
1297 | loop->num_nodes--;
1298 | FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
1299 | ploop->num_nodes--;
1300 | bb->loop_father = NULL;
1301 | |||||
1302 | FOR_EACH_EDGE (e, ei, bb->succs)
1303 | { | ||||
1304 | rescan_loop_exit (e, false, true); | ||||
1305 | } | ||||
1306 | FOR_EACH_EDGE (e, ei, bb->preds)
1307 | { | ||||
1308 | rescan_loop_exit (e, false, true); | ||||
1309 | } | ||||
1310 | } | ||||
1311 | |||||
1312 | /* Finds nearest common ancestor in loop tree for given loops. */ | ||||
1313 | class loop * | ||||
1314 | find_common_loop (class loop *loop_s, class loop *loop_d) | ||||
1315 | { | ||||
1316 | unsigned sdepth, ddepth; | ||||
1317 | |||||
1318 | if (!loop_s) return loop_d; | ||||
1319 | if (!loop_d) return loop_s; | ||||
1320 | |||||
1321 | sdepth = loop_depth (loop_s); | ||||
1322 | ddepth = loop_depth (loop_d); | ||||
1323 | |||||
1324 | if (sdepth < ddepth) | ||||
1325 | loop_d = (*loop_d->superloops)[sdepth]; | ||||
1326 | else if (sdepth > ddepth) | ||||
1327 | loop_s = (*loop_s->superloops)[ddepth]; | ||||
1328 | |||||
1329 | while (loop_s != loop_d) | ||||
1330 | { | ||||
1331 | loop_s = loop_outer (loop_s); | ||||
1332 | loop_d = loop_outer (loop_d); | ||||
1333 | } | ||||
1334 | return loop_s; | ||||
1335 | } | ||||
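/* Note: find_common_loop first uses the superloops vectors to bring both
   loops to the same depth and then walks both chains up in lockstep until
   they meet, so the result is the nearest common ancestor in the loop
   tree. */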
1336 | |||||
1337 | /* Removes LOOP from structures and frees its data. */ | ||||
1338 | |||||
1339 | void | ||||
1340 | delete_loop (class loop *loop) | ||||
1341 | { | ||||
1342 | /* Remove the loop from structure. */ | ||||
1343 | flow_loop_tree_node_remove (loop); | ||||
1344 | |||||
1345 | /* Remove loop from loops array. */ | ||||
1346 | (*current_loops->larray)[loop->num] = NULL;
1347 | |||||
1348 | /* Free loop data. */ | ||||
1349 | flow_loop_free (loop); | ||||
1350 | } | ||||
1351 | |||||
1352 | /* Cancels the LOOP; it must be innermost one. */ | ||||
1353 | |||||
1354 | static void | ||||
1355 | cancel_loop (class loop *loop) | ||||
1356 | { | ||||
1357 | basic_block *bbs; | ||||
1358 | unsigned i; | ||||
1359 | class loop *outer = loop_outer (loop); | ||||
1360 | |||||
1361 | gcc_assert (!loop->inner);
1362 | |||||
1363 | /* Move blocks up one level (they should be removed as soon as possible). */ | ||||
1364 | bbs = get_loop_body (loop); | ||||
1365 | for (i = 0; i < loop->num_nodes; i++) | ||||
1366 | bbs[i]->loop_father = outer; | ||||
1367 | |||||
1368 | free (bbs); | ||||
1369 | delete_loop (loop); | ||||
1370 | } | ||||
1371 | |||||
1372 | /* Cancels LOOP and all its subloops. */ | ||||
1373 | void | ||||
1374 | cancel_loop_tree (class loop *loop) | ||||
1375 | { | ||||
1376 | while (loop->inner) | ||||
1377 | cancel_loop_tree (loop->inner); | ||||
1378 | cancel_loop (loop); | ||||
1379 | } | ||||
1380 | |||||
1381 | /* Disable warnings about missing quoting in GCC diagnostics for | ||||
1382 | the verification errors. Their format strings don't follow GCC | ||||
1383 | diagnostic conventions and the calls are ultimately followed by | ||||
1384 | a deliberate ICE triggered by a failed assertion. */ | ||||
1385 | #if __GNUC__ >= 10 | ||||
1386 | # pragma GCC diagnostic push | ||||
1387 | # pragma GCC diagnostic ignored "-Wformat-diag" | ||||
1388 | #endif | ||||
1389 | |||||
1390 | /* Checks that information about loops is correct | ||||
1391 | -- sizes of loops are all right | ||||
1392 | -- results of get_loop_body really belong to the loop | ||||
1393 | -- loop headers have just a single entry edge and a single latch edge | ||||
1394 | -- loop latches have only a single successor, which is the header of their loop | ||||
1395 | -- irreducible loops are correctly marked | ||||
1396 | -- the cached loop depth and loop father of each bb are correct | ||||
1397 | */ | ||||
1398 | DEBUG_FUNCTION void | ||||
1399 | verify_loop_structure (void) | ||||
1400 | { | ||||
1401 | unsigned *sizes, i, j; | ||||
1402 | basic_block bb, *bbs; | ||||
1403 | int err = 0; | ||||
1404 | edge e; | ||||
1405 | unsigned num = number_of_loops (cfun); | ||||
1406 | struct loop_exit *exit, *mexit; | ||||
1407 | bool dom_available = dom_info_available_p (CDI_DOMINATORS); | ||||
1408 | |||||
1409 | if (loops_state_satisfies_p (LOOPS_NEED_FIXUP)) | ||||
1410 | { | ||||
1411 | error ("loop verification on loop tree that needs fixup"); | ||||
1412 | err = 1; | ||||
1413 | } | ||||
1414 | |||||
1415 | /* We need up-to-date dominators; compute or verify them. */ | ||||
1416 | if (!dom_available) | ||||
1417 | calculate_dominance_info (CDI_DOMINATORS); | ||||
1418 | else | ||||
1419 | verify_dominators (CDI_DOMINATORS); | ||||
1420 | |||||
1421 | /* Check the loop tree root. */ | ||||
1422 | if (current_loops->tree_root->header != ENTRY_BLOCK_PTR_FOR_FN (cfun) | ||||
1423 | || current_loops->tree_root->latch != EXIT_BLOCK_PTR_FOR_FN (cfun) | ||||
1424 | || (current_loops->tree_root->num_nodes | ||||
1425 | != (unsigned) n_basic_blocks_for_fn (cfun))) | ||||
1426 | { | ||||
1427 | error ("corrupt loop tree root"); | ||||
1428 | err = 1; | ||||
1429 | } | ||||
1430 | |||||
1431 | /* Check the headers. */ | ||||
1432 | FOR_EACH_BB_FN (bb, cfun) | ||||
1433 | if (bb_loop_header_p (bb)) | ||||
1434 | { | ||||
1435 | if (bb->loop_father->header == NULL) | ||||
1436 | { | ||||
1437 | error ("loop with header %d marked for removal", bb->index); | ||||
1438 | err = 1; | ||||
1439 | } | ||||
1440 | else if (bb->loop_father->header != bb) | ||||
1441 | { | ||||
1442 | error ("loop with header %d not in loop tree", bb->index); | ||||
1443 | err = 1; | ||||
1444 | } | ||||
1445 | } | ||||
1446 | else if (bb->loop_father->header == bb) | ||||
1447 | { | ||||
1448 | error ("non-loop with header %d not marked for removal", bb->index); | ||||
1449 | err = 1; | ||||
1450 | } | ||||
1451 | |||||
1452 | /* Check the recorded loop father and sizes of loops. */ | ||||
1453 | auto_sbitmap visited (last_basic_block_for_fn (cfun)); | ||||
1454 | bitmap_clear (visited); | ||||
1455 | bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun)); | ||||
1456 | for (auto loop : loops_list (cfun, LI_FROM_INNERMOST)) | ||||
1457 | { | ||||
1458 | unsigned n; | ||||
1459 | |||||
1460 | if (loop->header == NULL) | ||||
1461 | { | ||||
1462 | error ("removed loop %d in loop tree", loop->num); | ||||
1463 | err = 1; | ||||
1464 | continue; | ||||
1465 | } | ||||
1466 | |||||
1467 | n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun)); | ||||
1468 | if (loop->num_nodes != n) | ||||
1469 | { | ||||
1470 | error ("size of loop %d should be %d, not %d", | ||||
1471 | loop->num, n, loop->num_nodes); | ||||
1472 | err = 1; | ||||
1473 | } | ||||
1474 | |||||
1475 | for (j = 0; j < n; j++) | ||||
1476 | { | ||||
1477 | bb = bbs[j]; | ||||
1478 | |||||
1479 | if (!flow_bb_inside_loop_p (loop, bb)) | ||||
1480 | { | ||||
1481 | error ("bb %d does not belong to loop %d", | ||||
1482 | bb->index, loop->num); | ||||
1483 | err = 1; | ||||
1484 | } | ||||
1485 | |||||
1486 | /* Ignore this block if it is in an inner loop. */ | ||||
1487 | if (bitmap_bit_p (visited, bb->index)) | ||||
1488 | continue; | ||||
1489 | bitmap_set_bit (visited, bb->index); | ||||
1490 | |||||
1491 | if (bb->loop_father != loop) | ||||
1492 | { | ||||
1493 | error ("bb %d has father loop %d, should be loop %d", | ||||
1494 | bb->index, bb->loop_father->num, loop->num); | ||||
1495 | err = 1; | ||||
1496 | } | ||||
1497 | } | ||||
1498 | } | ||||
1499 | free (bbs); | ||||
1500 | |||||
1501 | /* Check headers and latches. */ | ||||
1502 | for (auto loop : loops_list (cfun, 0)) | ||||
1503 | { | ||||
1504 | i = loop->num; | ||||
1505 | if (loop->header == NULL) | ||||
1506 | continue; | ||||
1507 | if (!bb_loop_header_p (loop->header)) | ||||
1508 | { | ||||
1509 | error ("loop %d%'s header is not a loop header", i); | ||||
1510 | err = 1; | ||||
1511 | } | ||||
1512 | if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS) | ||||
1513 | && EDGE_COUNT (loop->header->preds) != 2) | ||||
1514 | { | ||||
1515 | error ("loop %d%'s header does not have exactly 2 entries", i); | ||||
1516 | err = 1; | ||||
1517 | } | ||||
1518 | if (loop->latch) | ||||
1519 | { | ||||
1520 | if (!find_edge (loop->latch, loop->header)) | ||||
1521 | { | ||||
1522 | error ("loop %d%'s latch does not have an edge to its header", i); | ||||
1523 | err = 1; | ||||
1524 | } | ||||
1525 | if (!dominated_by_p (CDI_DOMINATORS, loop->latch, loop->header)) | ||||
1526 | { | ||||
1527 | error ("loop %d%'s latch is not dominated by its header", i); | ||||
1528 | err = 1; | ||||
1529 | } | ||||
1530 | } | ||||
1531 | if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)) | ||||
1532 | { | ||||
1533 | if (!single_succ_p (loop->latch)) | ||||
1534 | { | ||||
1535 | error ("loop %d%'s latch does not have exactly 1 successor", i); | ||||
1536 | err = 1; | ||||
1537 | } | ||||
1538 | if (single_succ (loop->latch) != loop->header) | ||||
1539 | { | ||||
1540 | error ("loop %d%'s latch does not have header as successor", i); | ||||
1541 | err = 1; | ||||
1542 | } | ||||
1543 | if (loop->latch->loop_father != loop) | ||||
1544 | { | ||||
1545 | error ("loop %d%'s latch does not belong directly to it", i); | ||||
1546 | err = 1; | ||||
1547 | } | ||||
1548 | } | ||||
1549 | if (loop->header->loop_father != loop) | ||||
1550 | { | ||||
1551 | error ("loop %d%'s header does not belong directly to it", i); | ||||
1552 | err = 1; | ||||
1553 | } | ||||
1554 | if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)) | ||||
1555 | { | ||||
1556 | edge_iterator ei; | ||||
1557 | FOR_EACH_EDGE (e, ei, loop->header->preds) | ||||
1558 | if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header) | ||||
1559 | && e->flags & EDGE_IRREDUCIBLE_LOOP) | ||||
1560 | { | ||||
1561 | error ("loop %d%'s latch is marked as part of irreducible" | ||||
1562 | " region", i); | ||||
1563 | err = 1; | ||||
1564 | } | ||||
1565 | } | ||||
1566 | |||||
1567 | /* Check cached number of iterations for released SSA names. */ | ||||
1568 | tree ref; | ||||
1569 | if (loop->nb_iterations | ||||
1570 | && (ref = walk_tree (&loop->nb_iterations, | ||||
1571 | find_released_ssa_name, NULL, NULL))) | ||||
1572 | { | ||||
1573 | error ("loop %d%'s number of iterations %qE references the" | ||||
1574 | " released SSA name %qE", i, loop->nb_iterations, ref); | ||||
1575 | err = 1; | ||||
1576 | } | ||||
1577 | } | ||||
1578 | |||||
1579 | /* Check irreducible loops. */ | ||||
1580 | if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)) | ||||
1581 | { | ||||
1582 | auto_edge_flag saved_edge_irr (cfun); | ||||
1583 | auto_bb_flag saved_bb_irr (cfun); | ||||
1584 | /* Save old info. */ | ||||
1585 | FOR_EACH_BB_FN (bb, cfun) | ||||
1586 | { | ||||
1587 | edge_iterator ei; | ||||
1588 | if (bb->flags & BB_IRREDUCIBLE_LOOP) | ||||
1589 | bb->flags |= saved_bb_irr; | ||||
1590 | FOR_EACH_EDGE (e, ei, bb->succs) | ||||
1591 | if (e->flags & EDGE_IRREDUCIBLE_LOOP) | ||||
1592 | e->flags |= saved_edge_irr; | ||||
1593 | } | ||||
1594 | |||||
1595 | /* Recount it. */ | ||||
1596 | mark_irreducible_loops (); | ||||
1597 | |||||
1598 | /* Compare. */ | ||||
1599 | FOR_EACH_BB_FN (bb, cfun) | ||||
1600 | { | ||||
1601 | edge_iterator ei; | ||||
1602 | |||||
1603 | if ((bb->flags & BB_IRREDUCIBLE_LOOP) | ||||
1604 | && !(bb->flags & saved_bb_irr)) | ||||
1605 | { | ||||
1606 | error ("basic block %d should be marked irreducible", bb->index); | ||||
1607 | err = 1; | ||||
1608 | } | ||||
1609 | else if (!(bb->flags & BB_IRREDUCIBLE_LOOP) | ||||
1610 | && (bb->flags & saved_bb_irr)) | ||||
1611 | { | ||||
1612 | error ("basic block %d should not be marked irreducible", bb->index); | ||||
1613 | err = 1; | ||||
1614 | } | ||||
1615 | bb->flags &= ~saved_bb_irr; | ||||
1616 | FOR_EACH_EDGE (e, ei, bb->succs) | ||||
1617 | { | ||||
1618 | if ((e->flags & EDGE_IRREDUCIBLE_LOOP) | ||||
1619 | && !(e->flags & saved_edge_irr)) | ||||
1620 | { | ||||
1621 | error ("edge from %d to %d should be marked irreducible", | ||||
1622 | e->src->index, e->dest->index); | ||||
1623 | err = 1; | ||||
1624 | } | ||||
1625 | else if (!(e->flags & EDGE_IRREDUCIBLE_LOOP) | ||||
1626 | && (e->flags & saved_edge_irr)) | ||||
1627 | { | ||||
1628 | error ("edge from %d to %d should not be marked irreducible", | ||||
1629 | e->src->index, e->dest->index); | ||||
1630 | err = 1; | ||||
1631 | } | ||||
1632 | e->flags &= ~saved_edge_irr; | ||||
1633 | } | ||||
1634 | } | ||||
1635 | } | ||||
1636 | |||||
1637 | /* Check the recorded loop exits. */ | ||||
1638 | for (auto loop : loops_list (cfun, 0)) | ||||
1639 | { | ||||
1640 | if (!loop->exits || loop->exits->e != NULL) | ||||
1641 | { | ||||
1642 | error ("corrupted head of the exits list of loop %d", | ||||
1643 | loop->num); | ||||
1644 | err = 1; | ||||
1645 | } | ||||
1646 | else | ||||
1647 | { | ||||
1648 | /* Check that the list forms a cycle, and all elements except for the head | ||||
1649 | are nonnull; MEXIT moves one step for every two steps of EXIT, so the walk also terminates if a corrupted list cycles without passing through the head. */ | ||||
1650 | for (mexit = loop->exits, exit = mexit->next, i = 0; | ||||
1651 | exit->e && exit != mexit; | ||||
1652 | exit = exit->next) | ||||
1653 | { | ||||
1654 | if (i++ & 1) | ||||
1655 | mexit = mexit->next; | ||||
1656 | } | ||||
1657 | |||||
1658 | if (exit != loop->exits) | ||||
1659 | { | ||||
1660 | error ("corrupted exits list of loop %d", loop->num); | ||||
1661 | err = 1; | ||||
1662 | } | ||||
1663 | } | ||||
1664 | |||||
1665 | if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS)) | ||||
1666 | { | ||||
1667 | if (loop->exits->next != loop->exits) | ||||
1668 | { | ||||
1669 | error ("nonempty exits list of loop %d, but exits are not recorded", | ||||
1670 | loop->num); | ||||
1671 | err = 1; | ||||
1672 | } | ||||
1673 | } | ||||
1674 | } | ||||
1675 | |||||
1676 | if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS)) | ||||
1677 | { | ||||
1678 | unsigned n_exits = 0, eloops; | ||||
1679 | |||||
1680 | sizes = XCNEWVEC (unsigned, num); | ||||
1681 | memset (sizes, 0, sizeof (unsigned) * num); | ||||
1682 | FOR_EACH_BB_FN (bb, cfun) | ||||
1683 | { | ||||
1684 | edge_iterator ei; | ||||
1685 | if (bb->loop_father == current_loops->tree_root) | ||||
1686 | continue; | ||||
1687 | FOR_EACH_EDGE (e, ei, bb->succs) | ||||
1688 | { | ||||
1689 | if (flow_bb_inside_loop_p (bb->loop_father, e->dest)) | ||||
1690 | continue; | ||||
1691 | |||||
1692 | n_exits++; | ||||
1693 | exit = get_exit_descriptions (e); | ||||
1694 | if (!exit) | ||||
1695 | { | ||||
1696 | error ("exit %d->%d not recorded", | ||||
1697 | e->src->index, e->dest->index); | ||||
1698 | err = 1; | ||||
1699 | } | ||||
1700 | eloops = 0; | ||||
1701 | for (; exit; exit = exit->next_e) | ||||
1702 | eloops++; | ||||
1703 | |||||
1704 | for (class loop *loop = bb->loop_father; | ||||
1705 | loop != e->dest->loop_father | ||||
1706 | /* When a loop exit is also an entry edge, which | ||||
1707 | can happen when avoiding CFG manipulations, | ||||
1708 | the last loop exited is the outer loop | ||||
1709 | of the loop entered. */ | ||||
1710 | && loop != loop_outer (e->dest->loop_father); | ||||
1711 | loop = loop_outer (loop)) | ||||
1712 | { | ||||
1713 | eloops--; | ||||
1714 | sizes[loop->num]++; | ||||
1715 | } | ||||
1716 | |||||
1717 | if (eloops != 0) | ||||
1718 | { | ||||
1719 | error ("wrong list of exited loops for edge %d->%d", | ||||
1720 | e->src->index, e->dest->index); | ||||
1721 | err = 1; | ||||
1722 | } | ||||
1723 | } | ||||
1724 | } | ||||
1725 | |||||
1726 | if (n_exits != current_loops->exits->elements ()) | ||||
1727 | { | ||||
1728 | error ("too many loop exits recorded"); | ||||
1729 | err = 1; | ||||
1730 | } | ||||
1731 | |||||
1732 | for (auto loop : loops_list (cfun, 0)) | ||||
1733 | { | ||||
1734 | eloops = 0; | ||||
1735 | for (exit = loop->exits->next; exit->e; exit = exit->next) | ||||
1736 | eloops++; | ||||
1737 | if (eloops != sizes[loop->num]) | ||||
1738 | { | ||||
1739 | error ("%d exits recorded for loop %d (having %d exits)", | ||||
1740 | eloops, loop->num, sizes[loop->num]); | ||||
1741 | err = 1; | ||||
1742 | } | ||||
1743 | } | ||||
1744 | |||||
1745 | free (sizes); | ||||
1746 | } | ||||
1747 | |||||
1748 | gcc_assert (!err); | ||||
1749 | |||||
1750 | if (!dom_available) | ||||
1751 | free_dominance_info (CDI_DOMINATORS); | ||||
1752 | } | ||||
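/* Editor note -- in the rest of the compiler this verifier is normally not
   called directly; the usual entry point is believed to be the
   checking_verify_loop_structure () wrapper from cfgloop.h, which only runs
   the check when flag_checking is enabled, e.g.:

     loop_optimizer_init (LOOPS_NORMAL);
     // ...transform the CFG / loop tree...
     checking_verify_loop_structure ();
*/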
1753 | |||||
1754 | #if __GNUC__ >= 10 | ||||
1755 | # pragma GCC diagnostic pop | ||||
1756 | #endif | ||||
1757 | |||||
1758 | /* Returns latch edge of LOOP. */ | ||||
1759 | edge | ||||
1760 | loop_latch_edge (const class loop *loop) | ||||
1761 | { | ||||
1762 | return find_edge (loop->latch, loop->header); | ||||
1763 | } | ||||
1764 | |||||
1765 | /* Returns preheader edge of LOOP. */ | ||||
1766 | edge | ||||
1767 | loop_preheader_edge (const class loop *loop) | ||||
1768 | { | ||||
1769 | edge e; | ||||
1770 | edge_iterator ei; | ||||
1771 | |||||
1772 | gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS) | ||||
1773 | && ! loops_state_satisfies_p (LOOPS_MAY_HAVE_MULTIPLE_LATCHES)); | ||||
1774 | |||||
1775 | FOR_EACH_EDGE (e, ei, loop->header->preds) | ||||
1776 | if (e->src != loop->latch) | ||||
1777 | break; | ||||
1778 | |||||
1779 | if (! e) | ||||
1780 | { | ||||
1781 | gcc_assert (! loop_outer (loop)); | ||||
1782 | return single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | ||||
1783 | } | ||||
1784 | |||||
1785 | return e; | ||||
1786 | } | ||||
1787 | |||||
1788 | /* Returns true if E is an exit of LOOP. */ | ||||
1789 | |||||
1790 | bool | ||||
1791 | loop_exit_edge_p (const class loop *loop, const_edge e) | ||||
1792 | { | ||||
1793 | return (flow_bb_inside_loop_p (loop, e->src) | ||||
1794 | && !flow_bb_inside_loop_p (loop, e->dest)); | ||||
1795 | } | ||||
1796 | |||||
1797 | /* Returns the single exit edge of LOOP, or NULL if LOOP has either no exit | ||||
1798 | or more than one exit. If the loops do not have their exits recorded, | ||||
1799 | NULL is always returned. */ | ||||
1800 | |||||
1801 | edge | ||||
1802 | single_exit (const class loop *loop) | ||||
1803 | { | ||||
1804 | struct loop_exit *exit = loop->exits->next; | ||||
1805 | |||||
1806 | if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS)) | ||||
1807 | return NULL; | ||||
1808 | |||||
1809 | if (exit->e && exit->next == loop->exits) | ||||
1810 | return exit->e; | ||||
1811 | else | ||||
1812 | return NULL; | ||||
1813 | } | ||||
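/* Editor note -- a minimal usage sketch (not part of cfgloop.cc): single_exit
   must be guarded, since NULL means either "exits not recorded" or "zero or
   several exits".

     edge exit = single_exit (loop);
     if (exit)
       {
         // exit->src is the last block inside LOOP; exit->dest is outside.
       }
     else
       {
         // Fall back to enumerating all exits (e.g. get_loop_exit_edges ())
         // or give up on the transformation.
       }
*/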
1814 | |||||
1815 | /* Returns true when BB has an incoming edge exiting LOOP. */ | ||||
1816 | |||||
1817 | bool | ||||
1818 | loop_exits_to_bb_p (class loop *loop, basic_block bb) | ||||
1819 | { | ||||
1820 | edge e; | ||||
1821 | edge_iterator ei; | ||||
1822 | |||||
1823 | FOR_EACH_EDGE (e, ei, bb->preds)for ((ei) = ei_start_1 (&((bb->preds))); ei_cond ((ei) , &(e)); ei_next (&(ei))) | ||||
1824 | if (loop_exit_edge_p (loop, e)) | ||||
1825 | return true; | ||||
1826 | |||||
1827 | return false; | ||||
1828 | } | ||||
1829 | |||||
1830 | /* Returns true when BB has an outgoing edge exiting LOOP. */ | ||||
1831 | |||||
1832 | bool | ||||
1833 | loop_exits_from_bb_p (class loop *loop, basic_block bb) | ||||
1834 | { | ||||
1835 | edge e; | ||||
1836 | edge_iterator ei; | ||||
1837 | |||||
1838 | FOR_EACH_EDGE (e, ei, bb->succs)for ((ei) = ei_start_1 (&((bb->succs))); ei_cond ((ei) , &(e)); ei_next (&(ei))) | ||||
1839 | if (loop_exit_edge_p (loop, e)) | ||||
1840 | return true; | ||||
1841 | |||||
1842 | return false; | ||||
1843 | } | ||||
1844 | |||||
1845 | /* Return location corresponding to the loop control condition if possible. */ | ||||
1846 | |||||
1847 | dump_user_location_t | ||||
1848 | get_loop_location (class loop *loop) | ||||
1849 | { | ||||
1850 | rtx_insn *insn = NULL; | ||||
1851 | class niter_desc *desc = NULL; | ||||
1852 | edge exit; | ||||
1853 | |||||
1854 | /* For a for or while loop, we would like to return the location | ||||
1855 | of the for or while statement, if possible. To do this, look | ||||
1856 | for the branch guarding the loop back-edge. */ | ||||
1857 | |||||
1858 | /* If this is a simple loop with an in_edge, then the loop control | ||||
1859 | branch is typically at the end of its source. */ | ||||
1860 | desc = get_simple_loop_desc (loop); | ||||
1861 | if (desc->in_edge) | ||||
1862 | { | ||||
1863 | FOR_BB_INSNS_REVERSE (desc->in_edge->src, insn) | ||||
1864 | { | ||||
1865 | if (INSN_P (insn) && INSN_HAS_LOCATION (insn)) | ||||
1866 | return insn; | ||||
1867 | } | ||||
1868 | } | ||||
1869 | /* If loop has a single exit, then the loop control branch | ||||
1870 | must be at the end of its source. */ | ||||
1871 | if ((exit = single_exit (loop))) | ||||
1872 | { | ||||
1873 | FOR_BB_INSNS_REVERSE (exit->src, insn) | ||||
1874 | { | ||||
1875 | if (INSN_P (insn) && INSN_HAS_LOCATION (insn)) | ||||
1876 | return insn; | ||||
1877 | } | ||||
1878 | } | ||||
1879 | /* Next check the latch, to see if it is non-empty. */ | ||||
1880 | FOR_BB_INSNS_REVERSE (loop->latch, insn) | ||||
1881 | { | ||||
1882 | if (INSN_P (insn) && INSN_HAS_LOCATION (insn)) | ||||
1883 | return insn; | ||||
1884 | } | ||||
1885 | /* Finally, if none of the above identifies the loop control branch, | ||||
1886 | return the first location in the loop header. */ | ||||
1887 | FOR_BB_INSNS (loop->header, insn) | ||||
1888 | { | ||||
1889 | if (INSN_P (insn) && INSN_HAS_LOCATION (insn)) | ||||
1890 | return insn; | ||||
1891 | } | ||||
1892 | /* If all else fails, simply return the current function location. */ | ||||
1893 | return dump_user_location_t::from_function_decl (current_function_decl); | ||||
1894 | } | ||||
1895 | |||||
1896 | /* Records that every statement in LOOP is executed I_BOUND times. | ||||
1897 | REALISTIC is true if I_BOUND is expected to be close to the real number | ||||
1898 | of iterations. UPPER is true if we are sure the loop iterates at most | ||||
1899 | I_BOUND times. */ | ||||
1900 | |||||
1901 | void | ||||
1902 | record_niter_bound (class loop *loop, const widest_int &i_bound, | ||||
1903 | bool realistic, bool upper) | ||||
1904 | { | ||||
1905 | /* Update the bounds only when there is no previous estimation, or when the | ||||
1906 | current estimation is smaller. */ | ||||
1907 | if (upper | ||||
1908 | && (!loop->any_upper_bound | ||||
1909 | || wi::ltu_p (i_bound, loop->nb_iterations_upper_bound))) | ||||
1910 | { | ||||
1911 | loop->any_upper_bound = true; | ||||
1912 | loop->nb_iterations_upper_bound = i_bound; | ||||
1913 | if (!loop->any_likely_upper_bound) | ||||
1914 | { | ||||
1915 | loop->any_likely_upper_bound = true; | ||||
1916 | loop->nb_iterations_likely_upper_bound = i_bound; | ||||
1917 | } | ||||
1918 | } | ||||
1919 | if (realistic | ||||
1920 | && (!loop->any_estimate | ||||
1921 | || wi::ltu_p (i_bound, loop->nb_iterations_estimate))) | ||||
1922 | { | ||||
1923 | loop->any_estimate = true; | ||||
1924 | loop->nb_iterations_estimate = i_bound; | ||||
1925 | } | ||||
1926 | if (!realistic | ||||
1927 | && (!loop->any_likely_upper_bound | ||||
1928 | || wi::ltu_p (i_bound, loop->nb_iterations_likely_upper_bound))) | ||||
1929 | { | ||||
1930 | loop->any_likely_upper_bound = true; | ||||
1931 | loop->nb_iterations_likely_upper_bound = i_bound; | ||||
1932 | } | ||||
1933 | |||||
1934 | /* If an upper bound is smaller than the realistic estimate of the | ||||
1935 | number of iterations, use the upper bound instead. */ | ||||
1936 | if (loop->any_upper_bound | ||||
1937 | && loop->any_estimate | ||||
1938 | && wi::ltu_p (loop->nb_iterations_upper_bound, | ||||
1939 | loop->nb_iterations_estimate)) | ||||
1940 | loop->nb_iterations_estimate = loop->nb_iterations_upper_bound; | ||||
1941 | if (loop->any_upper_bound | ||||
1942 | && loop->any_likely_upper_bound | ||||
1943 | && wi::ltu_p (loop->nb_iterations_upper_bound, | ||||
1944 | loop->nb_iterations_likely_upper_bound)) | ||||
1945 | loop->nb_iterations_likely_upper_bound = loop->nb_iterations_upper_bound; | ||||
1946 | } | ||||
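/* Editor note -- an illustrative trace of the update rules above for a
   hypothetical loop L that starts with no recorded bounds:

     record_niter_bound (L, 100, false, true);
       -> nb_iterations_upper_bound = 100, likely upper bound = 100
     record_niter_bound (L, 10, true, false);
       -> nb_iterations_estimate = 10
     record_niter_bound (L, 5, false, true);
       -> upper bound tightened to 5; the final capping step also lowers
          the estimate and the likely upper bound to 5

   Bounds are only ever tightened, never relaxed.  */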
1947 | |||||
1948 | /* Similar to get_estimated_loop_iterations, but returns the estimate only | ||||
1949 | if it fits into a HOST_WIDE_INT. If this is not the case, or the estimate | ||||
1950 | on the number of iterations of LOOP could not be derived, returns -1. */ | ||||
1951 | |||||
1952 | HOST_WIDE_INT | ||||
1953 | get_estimated_loop_iterations_int (class loop *loop) | ||||
1954 | { | ||||
1955 | widest_int nit; | ||||
1956 | HOST_WIDE_INT hwi_nit; | ||||
1957 | |||||
1958 | if (!get_estimated_loop_iterations (loop, &nit)) | ||||
1959 | return -1; | ||||
1960 | |||||
1961 | if (!wi::fits_shwi_p (nit)) | ||||
1962 | return -1; | ||||
1963 | hwi_nit = nit.to_shwi (); | ||||
1964 | |||||
1965 | return hwi_nit < 0 ? -1 : hwi_nit; | ||||
1966 | } | ||||
1967 | |||||
1968 | /* Returns an upper bound on the number of executions of statements | ||||
1969 | in the LOOP. For statements before the loop exit, this exceeds | ||||
1970 | the number of executions of the latch by one. */ | ||||
1971 | |||||
1972 | HOST_WIDE_INT | ||||
1973 | max_stmt_executions_int (class loop *loop) | ||||
1974 | { | ||||
1975 | HOST_WIDE_INT nit = get_max_loop_iterations_int (loop); | ||||
1976 | HOST_WIDE_INT snit; | ||||
1977 | |||||
1978 | if (nit == -1) | ||||
1979 | return -1; | ||||
1980 | |||||
1981 | snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1); | ||||
1982 | |||||
1983 | /* If the computation overflows, return -1. */ | ||||
1984 | return snit < 0 ? -1 : snit; | ||||
1985 | } | ||||
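/* Editor note -- why the "+ 1" above: get_max_loop_iterations_int bounds the
   number of times the latch edge is taken, and a statement on the path from
   the header to the loop exit executes once more than that.  E.g. if the
   latch is taken at most 3 times, such statements run at most 4 times.  The
   unsigned addition and the snit < 0 test catch the case where nit is the
   maximal HOST_WIDE_INT and the + 1 would overflow.  */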
1986 | |||||
1987 | /* Returns a likely upper bound on the number of executions of statements | ||||
1988 | in the LOOP. For statements before the loop exit, this exceeds | ||||
1989 | the number of executions of the latch by one. */ | ||||
1990 | |||||
1991 | HOST_WIDE_INT | ||||
1992 | likely_max_stmt_executions_int (class loop *loop) | ||||
1993 | { | ||||
1994 | HOST_WIDE_INT nit = get_likely_max_loop_iterations_int (loop); | ||||
1995 | HOST_WIDE_INT snit; | ||||
1996 | |||||
1997 | if (nit == -1) | ||||
1998 | return -1; | ||||
1999 | |||||
2000 | snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1); | ||||
2001 | |||||
2002 | /* If the computation overflows, return -1. */ | ||||
2003 | return snit < 0 ? -1 : snit; | ||||
2004 | } | ||||
2005 | |||||
2006 | /* Sets NIT to the estimated number of executions of the latch of the | ||||
2007 | LOOP. If we have no reliable estimate, the function returns false, otherwise | ||||
2008 | returns true. */ | ||||
2009 | |||||
2010 | bool | ||||
2011 | get_estimated_loop_iterations (class loop *loop, widest_int *nit) | ||||
2012 | { | ||||
2013 | /* Even if the bound is not recorded, possibly we can derive one from | ||||
2014 | the profile. */ | ||||
2015 | if (!loop->any_estimate) | ||||
2016 | { | ||||
2017 | if (loop->header->count.reliable_p ()) | ||||
2018 | { | ||||
2019 | *nit = gcov_type_to_wide_int | ||||
2020 | (expected_loop_iterations_unbounded (loop) + 1); | ||||
2021 | return true; | ||||
2022 | } | ||||
2023 | return false; | ||||
2024 | } | ||||
2025 | |||||
2026 | *nit = loop->nb_iterations_estimate; | ||||
2027 | return true; | ||||
2028 | } | ||||
2029 | |||||
2030 | /* Sets NIT to an upper bound for the maximum number of executions of the | ||||
2031 | latch of the LOOP. If we have no reliable estimate, the function returns | ||||
2032 | false, otherwise returns true. */ | ||||
2033 | |||||
2034 | bool | ||||
2035 | get_max_loop_iterations (const class loop *loop, widest_int *nit) | ||||
2036 | { | ||||
2037 | if (!loop->any_upper_bound) | ||||
2038 | return false; | ||||
2039 | |||||
2040 | *nit = loop->nb_iterations_upper_bound; | ||||
2041 | return true; | ||||
2042 | } | ||||
2043 | |||||
2044 | /* Similar to get_max_loop_iterations, but returns the estimate only | ||||
2045 | if it fits into a HOST_WIDE_INT. If this is not the case, or the estimate | ||||
2046 | on the number of iterations of LOOP could not be derived, returns -1. */ | ||||
2047 | |||||
2048 | HOST_WIDE_INT | ||||
2049 | get_max_loop_iterations_int (const class loop *loop) | ||||
2050 | { | ||||
2051 | widest_int nit; | ||||
2052 | HOST_WIDE_INT hwi_nit; | ||||
2053 | |||||
2054 | if (!get_max_loop_iterations (loop, &nit)) | ||||
2055 | return -1; | ||||
2056 | |||||
2057 | if (!wi::fits_shwi_p (nit)) | ||||
2058 | return -1; | ||||
2059 | hwi_nit = nit.to_shwi (); | ||||
2060 | |||||
2061 | return hwi_nit < 0 ? -1 : hwi_nit; | ||||
2062 | } | ||||
2063 | |||||
2064 | /* Sets NIT to an upper bound for the maximum number of executions of the | ||||
2065 | latch of the LOOP. If we have no reliable estimate, the function returns | ||||
2066 | false, otherwise returns true. */ | ||||
2067 | |||||
2068 | bool | ||||
2069 | get_likely_max_loop_iterations (class loop *loop, widest_int *nit) | ||||
2070 | { | ||||
2071 | if (!loop->any_likely_upper_bound) | ||||
2072 | return false; | ||||
2073 | |||||
2074 | *nit = loop->nb_iterations_likely_upper_bound; | ||||
2075 | return true; | ||||
2076 | } | ||||
2077 | |||||
2078 | /* Similar to get_likely_max_loop_iterations, but returns the estimate only | ||||
2079 | if it fits into a HOST_WIDE_INT. If this is not the case, or the estimate | ||||
2080 | on the number of iterations of LOOP could not be derived, returns -1. */ | ||||
2081 | |||||
2082 | HOST_WIDE_INT | ||||
2083 | get_likely_max_loop_iterations_int (class loop *loop) | ||||
2084 | { | ||||
2085 | widest_int nit; | ||||
2086 | HOST_WIDE_INT hwi_nit; | ||||
2087 | |||||
2088 | if (!get_likely_max_loop_iterations (loop, &nit)) | ||||
2089 | return -1; | ||||
2090 | |||||
2091 | if (!wi::fits_shwi_p (nit)) | ||||
2092 | return -1; | ||||
2093 | hwi_nit = nit.to_shwi (); | ||||
2094 | |||||
2095 | return hwi_nit < 0 ? -1 : hwi_nit; | ||||
2096 | } | ||||
2097 | |||||
2098 | /* Returns the loop depth of the loop BB belongs to. */ | ||||
2099 | |||||
2100 | int | ||||
2101 | bb_loop_depth (const_basic_block bb) | ||||
2102 | { | ||||
2103 | return bb->loop_father ? loop_depth (bb->loop_father) : 0; | ||||
2104 | } | ||||
2105 | |||||
2106 | /* Marks LOOP for removal and sets LOOPS_NEED_FIXUP. */ | ||||
2107 | |||||
2108 | void | ||||
2109 | mark_loop_for_removal (loop_p loop) | ||||
2110 | { | ||||
2111 | if (loop->header == NULL) | ||||
2112 | return; | ||||
2113 | loop->former_header = loop->header; | ||||
2114 | loop->header = NULL; | ||||
2115 | loop->latch = NULL; | ||||
2116 | loops_state_set (LOOPS_NEED_FIXUP); | ||||
2117 | } | ||||
2118 | |||||
2119 | /* Starting from loop tree ROOT, walk the loop tree in the visiting | ||||
2120 | order specified by FLAGS. The supported visiting orders | ||||
2121 | are: | ||||
2122 | - LI_ONLY_INNERMOST | ||||
2123 | - LI_FROM_INNERMOST | ||||
2124 | - Preorder (if neither of above is specified) */ | ||||
2125 | |||||
2126 | void | ||||
2127 | loops_list::walk_loop_tree (class loop *root, unsigned flags) | ||||
2128 | { | ||||
2129 | bool only_innermost_p = flags & LI_ONLY_INNERMOST; | ||||
2130 | bool from_innermost_p = flags & LI_FROM_INNERMOST; | ||||
2131 | bool preorder_p = !(only_innermost_p || from_innermost_p); | ||||
2132 | |||||
2133 | /* Handle a ROOT without any inner loops early; this makes the later | ||||
2134 | processing simpler, since none of the loops processed in the | ||||
2135 | following while loop can be ROOT itself. */ | ||||
2136 | if (!root->inner) | ||||
2137 | { | ||||
2138 | if (flags & LI_INCLUDE_ROOT) | ||||
2139 | this->to_visit.quick_push (root->num); | ||||
2140 | return; | ||||
2141 | } | ||||
2142 | else if (preorder_p && flags & LI_INCLUDE_ROOT) | ||||
2143 | this->to_visit.quick_push (root->num); | ||||
2144 | |||||
2145 | class loop *aloop; | ||||
2146 | for (aloop = root->inner; | ||||
2147 | aloop->inner != NULL; | ||||
2148 | aloop = aloop->inner) | ||||
2149 | { | ||||
2150 | if (preorder_p) | ||||
2151 | this->to_visit.quick_push (aloop->num); | ||||
2152 | continue; | ||||
2153 | } | ||||
2154 | |||||
2155 | while (1) | ||||
2156 | { | ||||
2157 | gcc_assert (aloop != root); | ||||
2158 | if (from_innermost_p || aloop->inner == NULL) | ||||
2159 | this->to_visit.quick_push (aloop->num); | ||||
2160 | |||||
2161 | if (aloop->next) | ||||
2162 | { | ||||
2163 | for (aloop = aloop->next; | ||||
2164 | aloop->inner != NULL; | ||||
2165 | aloop = aloop->inner) | ||||
2166 | { | ||||
2167 | if (preorder_p) | ||||
2168 | this->to_visit.quick_push (aloop->num); | ||||
2169 | continue; | ||||
2170 | } | ||||
2171 | } | ||||
2172 | else if (loop_outer (aloop) == root) | ||||
2173 | break; | ||||
2174 | else | ||||
2175 | aloop = loop_outer (aloop); | ||||
2176 | } | ||||
2177 | |||||
2178 | /* When visiting from innermost, we need to consider root here | ||||
2179 | since the previous while loop doesn't handle it. */ | ||||
2180 | if (from_innermost_p && flags & LI_INCLUDE_ROOT) | ||||
2181 | this->to_visit.quick_push (root->num); | ||||
2182 | } | ||||
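/* Editor note -- an illustrative sketch of the orders produced above for a
   hypothetical loop tree  root { L1 { L1a, L1b }, L2 }:

     preorder (neither flag):   [root,] L1, L1a, L1b, L2
     LI_ONLY_INNERMOST:         L1a, L1b, L2
     LI_FROM_INNERMOST:         L1a, L1b, L1, L2 [, root]

   where root is included only when LI_INCLUDE_ROOT is set (and, for
   LI_ONLY_INNERMOST, only when root itself has no inner loops).  */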
2183 |
1 | /* Vector API for GNU compiler. | ||||
2 | Copyright (C) 2004-2023 Free Software Foundation, Inc. | ||||
3 | Contributed by Nathan Sidwell <nathan@codesourcery.com> | ||||
4 | Re-implemented in C++ by Diego Novillo <dnovillo@google.com> | ||||
5 | |||||
6 | This file is part of GCC. | ||||
7 | |||||
8 | GCC is free software; you can redistribute it and/or modify it under | ||||
9 | the terms of the GNU General Public License as published by the Free | ||||
10 | Software Foundation; either version 3, or (at your option) any later | ||||
11 | version. | ||||
12 | |||||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | ||||
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | ||||
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | ||||
16 | for more details. | ||||
17 | |||||
18 | You should have received a copy of the GNU General Public License | ||||
19 | along with GCC; see the file COPYING3. If not see | ||||
20 | <http://www.gnu.org/licenses/>. */ | ||||
21 | |||||
22 | #ifndef GCC_VEC_H | ||||
23 | #define GCC_VEC_H | ||||
24 | |||||
25 | /* Some gen* files have no ggc support as the header file gtype-desc.h is | ||||
26 | missing. Provide these definitions in case ggc.h has not been included. | ||||
27 | This is not a problem because any code that runs before gengtype is built | ||||
28 | will never need to use GC vectors. */ | ||||
29 | |||||
30 | extern void ggc_free (void *); | ||||
31 | extern size_t ggc_round_alloc_size (size_t requested_size); | ||||
32 | extern void *ggc_realloc (void *, size_t MEM_STAT_DECL); | ||||
33 | |||||
34 | /* Templated vector type and associated interfaces. | ||||
35 | |||||
36 | The interface functions are typesafe and use inline functions, | ||||
37 | sometimes backed by out-of-line generic functions. The vectors are | ||||
38 | designed to interoperate with the GTY machinery. | ||||
39 | |||||
40 | There are both 'index' and 'iterate' accessors. The index accessor | ||||
41 | is implemented by operator[]. The iterator returns a boolean | ||||
42 | iteration condition and updates the iteration variable passed by | ||||
43 | reference. Because the iterator will be inlined, the address-of | ||||
44 | can be optimized away. | ||||
45 | |||||
46 | Each operation that increases the number of active elements is | ||||
47 | available in 'quick' and 'safe' variants. The former presumes that | ||||
48 | there is sufficient allocated space for the operation to succeed | ||||
49 | (it dies if there is not). The latter will reallocate the | ||||
50 | vector, if needed. Reallocation causes an exponential increase in | ||||
51 | vector size. If you know you will be adding N elements, it would | ||||
52 | be more efficient to use the reserve operation before adding the | ||||
53 | elements with the 'quick' operation. This will ensure there are at | ||||
54 | least as many elements as you ask for; it will exponentially | ||||
55 | increase if there are too few spare slots. If you want to reserve a | ||||
56 | specific number of slots, but do not want the exponential increase | ||||
57 | (for instance, you know this is the last allocation), use the | ||||
58 | reserve_exact operation. You can also create a vector of a | ||||
59 | specific size from the get go. | ||||
60 | |||||
61 | You should prefer the push and pop operations, as they append and | ||||
62 | remove from the end of the vector. If you need to remove several | ||||
63 | items in one go, use the truncate operation. The insert and remove | ||||
64 | operations allow you to change elements in the middle of the | ||||
65 | vector. There are two remove operations, one which preserves the | ||||
66 | element ordering 'ordered_remove', and one which does not | ||||
67 | 'unordered_remove'. The latter function copies the end element | ||||
68 | into the removed slot, rather than invoking a memmove operation. The | ||||
69 | 'lower_bound' function will determine where to place an item in the | ||||
70 | array so that an insert at that position maintains sorted order. | ||||
71 | |||||
72 | Vectors are template types with three arguments: the type of the | ||||
73 | elements in the vector, the allocation strategy, and the physical | ||||
74 | layout to use | ||||
75 | |||||
76 | Three allocation strategies are supported: | ||||
77 | |||||
78 | - Heap: allocation is done using malloc/free. This is the | ||||
79 | default allocation strategy. | ||||
80 | |||||
81 | - GC: allocation is done using ggc_alloc/ggc_free. | ||||
82 | |||||
83 | - GC atomic: same as GC with the exception that the elements | ||||
84 | themselves are assumed to be of an atomic type that does | ||||
85 | not need to be garbage collected. This means that marking | ||||
86 | routines do not need to traverse the array marking the | ||||
87 | individual elements. This increases the performance of | ||||
88 | GC activities. | ||||
89 | |||||
90 | Two physical layouts are supported: | ||||
91 | |||||
92 | - Embedded: The vector is structured using the trailing array | ||||
93 | idiom. The last member of the structure is an array of size | ||||
94 | 1. When the vector is initially allocated, a single memory | ||||
95 | block is created to hold the vector's control data and the | ||||
96 | array of elements. These vectors cannot grow without | ||||
97 | reallocation (see discussion on embeddable vectors below). | ||||
98 | |||||
99 | - Space efficient: The vector is structured as a pointer to an | ||||
100 | embedded vector. This is the default layout. It means that | ||||
101 | vectors occupy a single word of storage before initial | ||||
102 | allocation. Vectors are allowed to grow (the internal | ||||
103 | pointer is reallocated but the main vector instance does not | ||||
104 | need to relocate). | ||||
105 | |||||
106 | The type, allocation and layout are specified when the vector is | ||||
107 | declared. | ||||
108 | |||||
109 | If you need to directly manipulate a vector, then the 'address' | ||||
110 | accessor will return the address of the start of the vector. Also | ||||
111 | the 'space' predicate will tell you whether there is spare capacity | ||||
112 | in the vector. You will not normally need to use these two functions. | ||||
113 | |||||
114 | Notes on the different layout strategies | ||||
115 | |||||
116 | * Embeddable vectors (vec<T, A, vl_embed>) | ||||
117 | |||||
118 | These vectors are suitable to be embedded in other data | ||||
119 | structures so that they can be pre-allocated in a contiguous | ||||
120 | memory block. | ||||
121 | |||||
122 | Embeddable vectors are implemented using the trailing array | ||||
123 | idiom, thus they are not resizeable without changing the address | ||||
124 | of the vector object itself. This means you cannot have | ||||
125 | variables or fields of embeddable vector type -- always use a | ||||
126 | pointer to a vector. The one exception is the final field of a | ||||
127 | structure, which could be a vector type. | ||||
128 | |||||
129 | You will have to use the embedded_size & embedded_init calls to | ||||
130 | create such objects, and they will not be resizeable (so the | ||||
131 | 'safe' allocation variants are not available). | ||||
132 | |||||
133 | Properties of embeddable vectors: | ||||
134 | |||||
135 | - The whole vector and control data are allocated in a single | ||||
136 | contiguous block. It uses the trailing-vector idiom, so | ||||
137 | allocation must reserve enough space for all the elements | ||||
138 | in the vector plus its control data. | ||||
139 | - The vector cannot be re-allocated. | ||||
140 | - The vector cannot grow nor shrink. | ||||
141 | - No indirections needed for access/manipulation. | ||||
142 | - It requires 2 words of storage (prior to vector allocation). | ||||
143 | |||||
144 | |||||
145 | * Space efficient vector (vec<T, A, vl_ptr>) | ||||
146 | |||||
147 | These vectors can grow dynamically and are allocated together | ||||
148 | with their control data. They are suited to be included in data | ||||
149 | structures. Prior to initial allocation, they only take a single | ||||
150 | word of storage. | ||||
151 | |||||
152 | These vectors are implemented as a pointer to embeddable vectors. | ||||
153 | The semantics allow for this pointer to be NULL to represent | ||||
154 | empty vectors. This way, empty vectors occupy minimal space in | ||||
155 | the structure containing them. | ||||
156 | |||||
157 | Properties: | ||||
158 | |||||
159 | - The whole vector and control data are allocated in a single | ||||
160 | contiguous block. | ||||
161 | - The whole vector may be re-allocated. | ||||
162 | - Vector data may grow and shrink. | ||||
163 | - Access and manipulation requires a pointer test and | ||||
164 | indirection. | ||||
165 | - It requires 1 word of storage (prior to vector allocation). | ||||
166 | |||||
167 | An example of their use would be, | ||||
168 | |||||
169 | struct my_struct { | ||||
170 | // A space-efficient vector of tree pointers in GC memory. | ||||
171 | vec<tree, va_gc, vl_ptr> v; | ||||
172 | }; | ||||
173 | |||||
174 | struct my_struct *s; | ||||
175 | |||||
176 | if (s->v.length ()) { we have some contents } | ||||
177 | s->v.safe_push (decl); // append some decl onto the end | ||||
178 | for (ix = 0; s->v.iterate (ix, &elt); ix++) | ||||
179 | { do something with elt } | ||||
180 | */ | ||||
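/* Editor addition -- a second, hedged usage sketch for the common GC idiom,
   where a structure holds a pointer to an embedded vector and the vec_safe_*
   free functions tolerate the pointer still being NULL:

     struct GTY(()) my_gc_struct {
       vec<tree, va_gc> *v;            // starts out NULL
     };

     vec_safe_push (s->v, decl);       // allocates the vector on first push
     unsigned ix;
     tree t;
     FOR_EACH_VEC_SAFE_ELT (s->v, ix, t)
       { ...use t... }

   my_gc_struct, s, decl and t are placeholder names for illustration.  */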
181 | |||||
182 | /* Support function for statistics. */ | ||||
183 | extern void dump_vec_loc_statistics (void); | ||||
184 | |||||
185 | /* Hashtable mapping vec addresses to descriptors. */ | ||||
186 | extern htab_t vec_mem_usage_hash; | ||||
187 | |||||
188 | /* Control data for vectors. This contains the number of allocated | ||||
189 | and used slots inside a vector. */ | ||||
190 | |||||
191 | struct vec_prefix | ||||
192 | { | ||||
193 | /* FIXME - These fields should be private, but we need to cater to | ||||
194 | compilers that have stricter notions of PODness for types. */ | ||||
195 | |||||
196 | /* Memory allocation support routines in vec.cc. */ | ||||
197 | void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO); | ||||
198 | void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO); | ||||
199 | static unsigned calculate_allocation (vec_prefix *, unsigned, bool); | ||||
200 | static unsigned calculate_allocation_1 (unsigned, unsigned); | ||||
201 | |||||
202 | /* Note that vec_prefix should be a base class for vec, but we use | ||||
203 | offsetof() on vector fields of tree structures (e.g., | ||||
204 | tree_binfo::base_binfos), and offsetof only supports base types. | ||||
205 | |||||
206 | To compensate, we make vec_prefix a field inside vec and make | ||||
207 | vec a friend class of vec_prefix so it can access its fields. */ | ||||
208 | template <typename, typename, typename> friend struct vec; | ||||
209 | |||||
210 | /* The allocator types also need access to our internals. */ | ||||
211 | friend struct va_gc; | ||||
212 | friend struct va_gc_atomic; | ||||
213 | friend struct va_heap; | ||||
214 | |||||
215 | unsigned m_alloc : 31; | ||||
216 | unsigned m_using_auto_storage : 1; | ||||
217 | unsigned m_num; | ||||
218 | }; | ||||
219 | |||||
220 | /* Calculate the number of slots to reserve for a vector, making sure that | ||||
221 | RESERVE slots are free. If EXACT, grow exactly, otherwise grow | ||||
222 | exponentially. PFX is the control data for the vector. */ | ||||
223 | |||||
224 | inline unsigned | ||||
225 | vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve, | ||||
226 | bool exact) | ||||
227 | { | ||||
228 | if (exact) | ||||
229 | return (pfx ? pfx->m_num : 0) + reserve; | ||||
230 | else if (!pfx) | ||||
231 | return MAX (4, reserve); | ||||
232 | return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve); | ||||
233 | } | ||||
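/* Editor note -- a rough illustration of the policy above (assuming
   calculate_allocation_1 grows at least geometrically):

     calculate_allocation (NULL, 3, false)  -> 4      minimum initial size
     calculate_allocation (NULL, 10, true)  -> 10     exact request
     with pfx->m_alloc == 8 and pfx->m_num == 8:
     calculate_allocation (pfx, 1, true)    -> 9      exact growth
     calculate_allocation (pfx, 1, false)   -> >= 16  exponential growth
*/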
234 | |||||
235 | template<typename, typename, typename> struct vec; | ||||
236 | |||||
237 | /* Valid vector layouts | ||||
238 | |||||
239 | vl_embed - Embeddable vector that uses the trailing array idiom. | ||||
240 | vl_ptr - Space efficient vector that uses a pointer to an | ||||
241 | embeddable vector. */ | ||||
242 | struct vl_embed { }; | ||||
243 | struct vl_ptr { }; | ||||
244 | |||||
245 | |||||
246 | /* Types of supported allocations | ||||
247 | |||||
248 | va_heap - Allocation uses malloc/free. | ||||
249 | va_gc - Allocation uses ggc_alloc. | ||||
250 | va_gc_atomic - Same as GC, but individual elements of the array | ||||
251 | do not need to be marked during collection. */ | ||||
252 | |||||
253 | /* Allocator type for heap vectors. */ | ||||
254 | struct va_heap | ||||
255 | { | ||||
256 | /* Heap vectors are frequently regular instances, so use the vl_ptr | ||||
257 | layout for them. */ | ||||
258 | typedef vl_ptr default_layout; | ||||
259 | |||||
260 | template<typename T> | ||||
261 | static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool | ||||
262 | CXX_MEM_STAT_INFO); | ||||
263 | |||||
264 | template<typename T> | ||||
265 | static void release (vec<T, va_heap, vl_embed> *&); | ||||
266 | }; | ||||
267 | |||||
268 | |||||
269 | /* Allocator for heap memory. Ensure there are at least RESERVE free | ||||
270 | slots in V. If EXACT is true, grow exactly, else grow | ||||
271 | exponentially. As a special case, if the vector had not been | ||||
272 | allocated and RESERVE is 0, no vector will be created. */ | ||||
273 | |||||
274 | template<typename T> | ||||
275 | inline void | ||||
276 | va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact | ||||
277 | MEM_STAT_DECL) | ||||
278 | { | ||||
279 | size_t elt_size = sizeof (T); | ||||
280 | unsigned alloc | ||||
281 | = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact); | ||||
282 | gcc_checking_assert (alloc); | ||||
283 | |||||
284 | if (GATHER_STATISTICS && v) | ||||
285 | v->m_vecpfx.release_overhead (v, elt_size * v->allocated (), | ||||
286 | v->allocated (), false); | ||||
287 | |||||
288 | size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc); | ||||
289 | unsigned nelem = v ? v->length () : 0; | ||||
290 | v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size)); | ||||
291 | v->embedded_init (alloc, nelem); | ||||
292 | |||||
293 | if (GATHER_STATISTICS) | ||||
294 | v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT); | ||||
295 | } | ||||
296 | |||||
297 | |||||
298 | #if GCC_VERSION >= 4007 | ||||
299 | #pragma GCC diagnostic push | ||||
300 | #pragma GCC diagnostic ignored "-Wfree-nonheap-object" | ||||
301 | #endif | ||||
302 | |||||
303 | /* Free the heap space allocated for vector V. */ | ||||
304 | |||||
305 | template<typename T> | ||||
306 | void | ||||
307 | va_heap::release (vec<T, va_heap, vl_embed> *&v) | ||||
308 | { | ||||
309 | size_t elt_size = sizeof (T); | ||||
310 | if (v == NULL) | ||||
311 | return; | ||||
312 | |||||
313 | if (GATHER_STATISTICS) | ||||
314 | v->m_vecpfx.release_overhead (v, elt_size * v->allocated (), | ||||
315 | v->allocated (), true); | ||||
316 | ::free (v); | ||||
317 | v = NULL; | ||||
318 | } | ||||
319 | |||||
320 | #if GCC_VERSION >= 4007 | ||||
321 | #pragma GCC diagnostic pop | ||||
322 | #endif | ||||
323 | |||||
324 | /* Allocator type for GC vectors. Notice that we need the structure | ||||
325 | declaration even if GC is not enabled. */ | ||||
326 | |||||
327 | struct va_gc | ||||
328 | { | ||||
329 | /* Use vl_embed as the default layout for GC vectors. Due to GTY | ||||
330 | limitations, GC vectors must always be pointers, so it is more | ||||
331 | efficient to use a pointer to the vl_embed layout, rather than | ||||
332 | using a pointer to a pointer as would be the case with vl_ptr. */ | ||||
333 | typedef vl_embed default_layout; | ||||
334 | |||||
335 | template<typename T, typename A> | ||||
336 | static void reserve (vec<T, A, vl_embed> *&, unsigned, bool | ||||
337 | CXX_MEM_STAT_INFO); | ||||
338 | |||||
339 | template<typename T, typename A> | ||||
340 | static void release (vec<T, A, vl_embed> *&v); | ||||
341 | }; | ||||
342 | |||||
343 | |||||
344 | /* Free GC memory used by V and reset V to NULL. */ | ||||
345 | |||||
346 | template<typename T, typename A> | ||||
347 | inline void | ||||
348 | va_gc::release (vec<T, A, vl_embed> *&v) | ||||
349 | { | ||||
350 | if (v) | ||||
351 | ::ggc_free (v); | ||||
352 | v = NULL; | ||||
353 | } | ||||
354 | |||||
355 | |||||
356 | /* Allocator for GC memory. Ensure there are at least RESERVE free | ||||
357 | slots in V. If EXACT is true, grow exactly, else grow | ||||
358 | exponentially. As a special case, if the vector had not been | ||||
359 | allocated and RESERVE is 0, no vector will be created. */ | ||||
360 | |||||
361 | template<typename T, typename A> | ||||
362 | void | ||||
363 | va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact | ||||
364 | MEM_STAT_DECL) | ||||
365 | { | ||||
366 | unsigned alloc | ||||
367 | = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact); | ||||
368 | if (!alloc) | ||||
369 | { | ||||
370 | ::ggc_free (v); | ||||
371 | v = NULL; | ||||
372 | return; | ||||
373 | } | ||||
374 | |||||
375 | /* Calculate the amount of space we want. */ | ||||
376 | size_t size = vec<T, A, vl_embed>::embedded_size (alloc); | ||||
377 | |||||
378 | /* Ask the allocator how much space it will really give us. */ | ||||
379 | size = ::ggc_round_alloc_size (size); | ||||
380 | |||||
381 | /* Adjust the number of slots accordingly. */ | ||||
382 | size_t vec_offset = sizeof (vec_prefix); | ||||
383 | size_t elt_size = sizeof (T); | ||||
384 | alloc = (size - vec_offset) / elt_size; | ||||
385 | |||||
386 | /* And finally, recalculate the amount of space we ask for. */ | ||||
387 | size = vec_offset + alloc * elt_size; | ||||
388 | |||||
389 | unsigned nelem = v ? v->length () : 0; | ||||
390 | v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size | ||||
391 | PASS_MEM_STAT)); | ||||
392 | v->embedded_init (alloc, nelem); | ||||
393 | } | ||||
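/* Editor note -- a hypothetical worked example of the slot adjustment above,
   assuming sizeof (vec_prefix) == 8, sizeof (T) == 8 and an allocator that
   rounds an 88-byte request up to 96 bytes:

     requested alloc = 10  ->  size = 8 + 10 * 8 = 88
     ggc_round_alloc_size  ->  96
     recomputed alloc      ->  (96 - 8) / 8 = 11
     final size            ->  8 + 11 * 8 = 96

   so the rounding slack becomes an extra usable slot instead of being
   wasted.  */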
394 | |||||
395 | |||||
396 | /* Allocator type for GC vectors. This is for vectors of types that are | ||||
397 | atomic w.r.t. collection, so allocation and deallocation are | ||||
398 | completely inherited from va_gc. */ | ||||
399 | struct va_gc_atomic : va_gc | ||||
400 | { | ||||
401 | }; | ||||
402 | |||||
403 | |||||
404 | /* Generic vector template. Default values for A and L indicate the | ||||
405 | most commonly used strategies. | ||||
406 | |||||
407 | FIXME - Ideally, they would all be vl_ptr to encourage using regular | ||||
408 | instances for vectors, but the existing GTY machinery is limited | ||||
409 | in that it can only deal with GC objects that are pointers | ||||
410 | themselves. | ||||
411 | |||||
412 | This means that vector operations that need to deal with | ||||
413 | potentially NULL pointers, must be provided as free | ||||
414 | functions (see the vec_safe_* functions above). */ | ||||
415 | template<typename T, | ||||
416 | typename A = va_heap, | ||||
417 | typename L = typename A::default_layout> | ||||
418 | struct GTY((user)) vec | ||||
419 | { | ||||
420 | }; | ||||
421 | |||||
422 | /* Allow C++11 range-based 'for' to work directly on vec<T>*. */ | ||||
423 | template<typename T, typename A, typename L> | ||||
424 | T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; } | ||||
425 | template<typename T, typename A, typename L> | ||||
426 | T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; } | ||||
427 | template<typename T, typename A, typename L> | ||||
428 | const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; } | ||||
429 | template<typename T, typename A, typename L> | ||||
430 | const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; } | ||||
431 | |||||
432 | /* Generic vec<> debug helpers. | ||||
433 | |||||
434 | These need to be instantiated for each vec<TYPE> used throughout | ||||
435 | the compiler like this: | ||||
436 | |||||
437 | DEFINE_DEBUG_VEC (TYPE) | ||||
438 | |||||
439 | The reason we have a debug_helper() is that GDB can't | ||||
440 | disambiguate a plain call to debug(some_vec), and it must be called | ||||
441 | like debug<TYPE>(some_vec). */ | ||||
442 | |||||
443 | template<typename T> | ||||
444 | void | ||||
445 | debug_helper (vec<T> &ref) | ||||
446 | { | ||||
447 | unsigned i; | ||||
448 | for (i = 0; i < ref.length (); ++i) | ||||
449 | { | ||||
450 | fprintf (stderr, "[%d] = ", i); | ||||
451 | debug_slim (ref[i]); | ||||
452 | fputc ('\n', stderr); | ||||
453 | } | ||||
454 | } | ||||
455 | |||||
456 | /* We need a separate va_gc variant here because default template | ||||
457 | arguments for functions cannot be used in C++98. Once this | ||||
458 | restriction is removed, these variants should be folded with the | ||||
459 | above debug_helper. */ | ||||
460 | |||||
461 | template<typename T> | ||||
462 | void | ||||
463 | debug_helper (vec<T, va_gc> &ref) | ||||
464 | { | ||||
465 | unsigned i; | ||||
466 | for (i = 0; i < ref.length (); ++i) | ||||
467 | { | ||||
468 | fprintf (stderr, "[%d] = ", i); | ||||
469 | debug_slim (ref[i]); | ||||
470 | fputc ('\n', stderr); | ||||
471 | } | ||||
472 | } | ||||
473 | |||||
474 | /* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper | ||||
475 | functions for a type T. */ | ||||
476 | |||||
477 | #define DEFINE_DEBUG_VEC(T) \ | ||||
478 | template void debug_helper (vec<T> &); \ | ||||
479 | template void debug_helper (vec<T, va_gc> &); \ | ||||
480 | /* Define the vec<T> debug functions. */ \ | ||||
481 | DEBUG_FUNCTION void \ | ||||
482 | debug (vec<T> &ref) \ | ||||
483 | { \ | ||||
484 | debug_helper <T> (ref); \ | ||||
485 | } \ | ||||
486 | DEBUG_FUNCTION void \ | ||||
487 | debug (vec<T> *ptr) \ | ||||
488 | { \ | ||||
489 | if (ptr) \ | ||||
490 | debug (*ptr); \ | ||||
491 | else \ | ||||
492 | fprintf (stderr, "<nil>\n"); \ | ||||
493 | } \ | ||||
494 | /* Define the vec<T, va_gc> debug functions. */ \ | ||||
495 | DEBUG_FUNCTION void \ | ||||
496 | debug (vec<T, va_gc> &ref) \ | ||||
497 | { \ | ||||
498 | debug_helper <T> (ref); \ | ||||
499 | } \ | ||||
500 | DEBUG_FUNCTION void \ | ||||
501 | debug (vec<T, va_gc> *ptr) \ | ||||
502 | { \ | ||||
503 | if (ptr) \ | ||||
504 | debug (*ptr); \ | ||||
505 | else \ | ||||
506 | fprintf (stderr, "<nil>\n"); \ | ||||
507 | } | ||||
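/* Illustrative sketch (editorial addition): a translation unit that wants
   debuggable vec<tree> instances would provide a debug_slim (tree) overload
   and then instantiate the helpers once:

     DEFINE_DEBUG_VEC (tree)

   after which `call debug (some_tree_vec)' works from GDB for both
   vec<tree> and vec<tree, va_gc> values and pointers.  The use of `tree'
   here is only an example.  */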
508 | |||||
509 | /* Default-construct N elements in DST. */ | ||||
510 | |||||
511 | template <typename T> | ||||
512 | inline void | ||||
513 | vec_default_construct (T *dst, unsigned n) | ||||
514 | { | ||||
515 | #ifdef BROKEN_VALUE_INITIALIZATION | ||||
516 | /* Versions of GCC before 4.4 sometimes leave certain objects | ||||
517 | uninitialized when value initialized, though if the type has | ||||
518 | user defined default ctor, that ctor is invoked. As a workaround | ||||
519 | perform clearing first and then the value initialization, which | ||||
520 | fixes the case when value initialization doesn't initialize due to | ||||
521 | the bugs and should initialize to all zeros, but still allows | ||||
522 | vectors for types with user defined default ctor that initializes | ||||
523 | some or all elements to non-zero. If T has no user defined | ||||
524 | default ctor and some non-static data members have user defined | ||||
525 | default ctors that initialize to non-zero the workaround will | ||||
526 | still not work properly; in that case we just need to provide | ||||
527 | user defined default ctor. */ | ||||
528 | memset (dst, '\0', sizeof (T) * n); | ||||
529 | #endif | ||||
530 | for ( ; n; ++dst, --n) | ||||
531 | ::new (static_cast<void*>(dst)) T (); | ||||
532 | } | ||||
533 | |||||
534 | /* Copy-construct N elements in DST from *SRC. */ | ||||
535 | |||||
536 | template <typename T> | ||||
537 | inline void | ||||
538 | vec_copy_construct (T *dst, const T *src, unsigned n) | ||||
539 | { | ||||
540 | for ( ; n; ++dst, ++src, --n) | ||||
541 | ::new (static_cast<void*>(dst)) T (*src); | ||||
542 | } | ||||
543 | |||||
544 | /* Type to provide zero-initialized values for vec<T, A, L>. This is | ||||
545 | used to provide nil initializers for vec instances. Since vec must | ||||
546 | be a trivially copyable type that can be copied by memcpy and zeroed | ||||
547 | out by memset, it must have defaulted default and copy ctor and copy | ||||
548 | assignment. To initialize a vec either use value initialization | ||||
549 | (e.g., vec() or vec v{ };) or assign it the value vNULL. This isn't | ||||
550 | needed for file-scope and function-local static vectors, which are | ||||
551 | zero-initialized by default. */ | ||||
552 | struct vnull { }; | ||||
553 | constexpr vnull vNULL{ }; | ||||
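/* Illustrative sketch (editorial addition): the equivalent ways, described
   above, to obtain a zero-initialized heap vector.

     vec<int> a = vec<int> ();   // value initialization
     vec<int> b{ };              // same, brace form
     vec<int> c = vNULL;         // via the generic nil value
*/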
554 | |||||
555 | |||||
556 | /* Embeddable vector. These vectors are suitable to be embedded | ||||
557 | in other data structures so that they can be pre-allocated in a | ||||
558 | contiguous memory block. | ||||
559 | |||||
560 | Embeddable vectors are implemented using the trailing array idiom, | ||||
561 | thus they are not resizeable without changing the address of the | ||||
562 | vector object itself. This means you cannot have variables or | ||||
563 | fields of embeddable vector type -- always use a pointer to a | ||||
564 | vector. The one exception is the final field of a structure, which | ||||
565 | could be a vector type. | ||||
566 | |||||
567 | You will have to use the embedded_size & embedded_init calls to | ||||
568 | create such objects, and they will not be resizeable (so the 'safe' | ||||
569 | allocation variants are not available). | ||||
570 | |||||
571 | Properties: | ||||
572 | |||||
573 | - The whole vector and control data are allocated in a single | ||||
574 | contiguous block. It uses the trailing-vector idiom, so | ||||
575 | allocation must reserve enough space for all the elements | ||||
576 | in the vector plus its control data. | ||||
577 | - The vector cannot be re-allocated. | ||||
578 | - The vector cannot grow nor shrink. | ||||
579 | - No indirections needed for access/manipulation. | ||||
580 | - It requires 2 words of storage (prior to vector allocation). */ | ||||
581 | |||||
582 | template<typename T, typename A> | ||||
583 | struct GTY((user)) vec<T, A, vl_embed> | ||||
584 | { | ||||
585 | public: | ||||
586 | unsigned allocated (void) const { return m_vecpfx.m_alloc; } | ||||
587 | unsigned length (void) const { return m_vecpfx.m_num; } | ||||
588 | bool is_empty (void) const { return m_vecpfx.m_num == 0; } | ||||
589 | T *address (void) { return reinterpret_cast <T *> (this + 1); } | ||||
590 | const T *address (void) const | ||||
591 | { return reinterpret_cast <const T *> (this + 1); } | ||||
592 | T *begin () { return address (); } | ||||
593 | const T *begin () const { return address (); } | ||||
594 | T *end () { return address () + length (); } | ||||
595 | const T *end () const { return address () + length (); } | ||||
596 | const T &operator[] (unsigned) const; | ||||
597 | T &operator[] (unsigned); | ||||
598 | T &last (void); | ||||
599 | bool space (unsigned) const; | ||||
600 | bool iterate (unsigned, T *) const; | ||||
601 | bool iterate (unsigned, T **) const; | ||||
602 | vec *copy (ALONE_CXX_MEM_STAT_INFO) const; | ||||
603 | void splice (const vec &); | ||||
604 | void splice (const vec *src); | ||||
605 | T *quick_push (const T &); | ||||
606 | T &pop (void); | ||||
607 | void truncate (unsigned); | ||||
608 | void quick_insert (unsigned, const T &); | ||||
609 | void ordered_remove (unsigned); | ||||
610 | void unordered_remove (unsigned); | ||||
611 | void block_remove (unsigned, unsigned); | ||||
612 | void qsort (int (*) (const void *, const void *)); | ||||
613 | void sort (int (*) (const void *, const void *, void *), void *); | ||||
614 | void stablesort (int (*) (const void *, const void *, void *), void *); | ||||
615 | T *bsearch (const void *key, int (*compar) (const void *, const void *)); | ||||
616 | T *bsearch (const void *key, | ||||
617 | int (*compar)(const void *, const void *, void *), void *); | ||||
618 | unsigned lower_bound (const T &, bool (*) (const T &, const T &)) const; | ||||
619 | bool contains (const T &search) const; | ||||
620 | static size_t embedded_size (unsigned); | ||||
621 | void embedded_init (unsigned, unsigned = 0, unsigned = 0); | ||||
622 | void quick_grow (unsigned len); | ||||
623 | void quick_grow_cleared (unsigned len); | ||||
624 | |||||
625 | /* vec class can access our internal data and functions. */ | ||||
626 | template <typename, typename, typename> friend struct vec; | ||||
627 | |||||
628 | /* The allocator types also need access to our internals. */ | ||||
629 | friend struct va_gc; | ||||
630 | friend struct va_gc_atomic; | ||||
631 | friend struct va_heap; | ||||
632 | |||||
633 | /* FIXME - This field should be private, but we need to cater to | ||||
634 | compilers that have stricter notions of PODness for types. */ | ||||
635 | /* Align m_vecpfx to simplify address (). */ | ||||
636 | alignas (T) alignas (vec_prefix) vec_prefix m_vecpfx; | ||||
637 | }; | ||||
638 | |||||
639 | |||||
640 | /* Convenience wrapper functions to use when dealing with pointers to | ||||
641 | embedded vectors. Some functionality for these vectors must be | ||||
642 | provided via free functions for these reasons: | ||||
643 | |||||
644 | 1- The pointer may be NULL (e.g., before initial allocation). | ||||
645 | |||||
646 | 2- When the vector needs to grow, it must be reallocated, so | ||||
647 | the pointer will change its value. | ||||
648 | |||||
649 | Because of limitations with the current GC machinery, all vectors | ||||
650 | in GC memory *must* be pointers. */ | ||||
651 | |||||
652 | |||||
653 | /* If V contains no room for NELEMS elements, return false. Otherwise, | ||||
654 | return true. */ | ||||
655 | template<typename T, typename A> | ||||
656 | inline bool | ||||
657 | vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems) | ||||
658 | { | ||||
659 | return v ? v->space (nelems) : nelems == 0; | ||||
660 | } | ||||
661 | |||||
662 | |||||
663 | /* If V is NULL, return 0. Otherwise, return V->length(). */ | ||||
664 | template<typename T, typename A> | ||||
665 | inline unsigned | ||||
666 | vec_safe_length (const vec<T, A, vl_embed> *v) | ||||
667 | { | ||||
668 | return v ? v->length () : 0; | ||||
669 | } | ||||
670 | |||||
671 | |||||
672 | /* If V is NULL, return NULL. Otherwise, return V->address(). */ | ||||
673 | template<typename T, typename A> | ||||
674 | inline T * | ||||
675 | vec_safe_address (vec<T, A, vl_embed> *v) | ||||
676 | { | ||||
677 | return v ? v->address () : NULL; | ||||
678 | } | ||||
679 | |||||
680 | |||||
681 | /* If V is NULL, return true. Otherwise, return V->is_empty(). */ | ||||
682 | template<typename T, typename A> | ||||
683 | inline bool | ||||
684 | vec_safe_is_empty (vec<T, A, vl_embed> *v) | ||||
685 | { | ||||
686 | return v ? v->is_empty () : true; | ||||
687 | } | ||||
688 | |||||
689 | /* If V does not have space for NELEMS elements, call | ||||
690 | V->reserve(NELEMS, EXACT). */ | ||||
691 | template<typename T, typename A> | ||||
692 | inline bool | ||||
693 | vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false | ||||
694 | CXX_MEM_STAT_INFO) | ||||
695 | { | ||||
696 | bool extend = nelems ? !vec_safe_space (v, nelems) : false; | ||||
697 | if (extend) | ||||
698 | A::reserve (v, nelems, exact PASS_MEM_STAT); | ||||
699 | return extend; | ||||
700 | } | ||||
701 | |||||
702 | template<typename T, typename A> | ||||
703 | inline bool | ||||
704 | vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems | ||||
705 | CXX_MEM_STAT_INFO) | ||||
706 | { | ||||
707 | return vec_safe_reserve (v, nelems, true PASS_MEM_STAT); | ||||
708 | } | ||||
709 | |||||
710 | |||||
711 | /* Allocate GC memory for V with space for NELEMS slots. If NELEMS | ||||
712 | is 0, V is initialized to NULL. */ | ||||
713 | |||||
714 | template<typename T, typename A> | ||||
715 | inline void | ||||
716 | vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO) | ||||
717 | { | ||||
718 | v = NULL; | ||||
719 | vec_safe_reserve (v, nelems, false PASS_MEM_STAT); | ||||
720 | } | ||||
721 | |||||
722 | |||||
723 | /* Free the GC memory allocated by vector V and set it to NULL. */ | ||||
724 | |||||
725 | template<typename T, typename A> | ||||
726 | inline void | ||||
727 | vec_free (vec<T, A, vl_embed> *&v) | ||||
728 | { | ||||
729 | A::release (v); | ||||
730 | } | ||||
731 | |||||
732 | |||||
733 | /* Grow V to length LEN. Allocate it, if necessary. */ | ||||
734 | template<typename T, typename A> | ||||
735 | inline void | ||||
736 | vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len, | ||||
737 | bool exact = false CXX_MEM_STAT_INFO) | ||||
738 | { | ||||
739 | unsigned oldlen = vec_safe_length (v); | ||||
740 | gcc_checking_assert (len >= oldlen); | ||||
741 | vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT); | ||||
742 | v->quick_grow (len); | ||||
743 | } | ||||
744 | |||||
745 | |||||
746 | /* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */ | ||||
747 | template<typename T, typename A> | ||||
748 | inline void | ||||
749 | vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len, | ||||
750 | bool exact = false CXX_MEM_STAT_INFO) | ||||
751 | { | ||||
752 | unsigned oldlen = vec_safe_length (v); | ||||
753 | vec_safe_grow (v, len, exact PASS_MEM_STAT); | ||||
754 | vec_default_construct (v->address () + oldlen, len - oldlen); | ||||
755 | } | ||||
756 | |||||
757 | |||||
758 | /* Assume V is not NULL. */ | ||||
759 | |||||
760 | template<typename T> | ||||
761 | inline void | ||||
762 | vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v, | ||||
763 | unsigned len, bool exact = false CXX_MEM_STAT_INFO) | ||||
764 | { | ||||
765 | v->safe_grow_cleared (len, exact PASS_MEM_STAT); | ||||
766 | } | ||||
767 | |||||
768 | /* If V does not have space for NELEMS elements, call | ||||
769 | V->reserve(NELEMS, EXACT). */ | ||||
770 | |||||
771 | template<typename T> | ||||
772 | inline bool | ||||
773 | vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false | ||||
774 | CXX_MEM_STAT_INFO) | ||||
775 | { | ||||
776 | return v->reserve (nelems, exact); | ||||
777 | } | ||||
778 | |||||
779 | |||||
780 | /* If V is NULL return false, otherwise return V->iterate(IX, PTR). */ | ||||
781 | template<typename T, typename A> | ||||
782 | inline bool | ||||
783 | vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr) | ||||
784 | { | ||||
785 | if (v) | ||||
786 | return v->iterate (ix, ptr); | ||||
787 | else | ||||
788 | { | ||||
789 | *ptr = 0; | ||||
790 | return false; | ||||
791 | } | ||||
792 | } | ||||
793 | |||||
794 | template<typename T, typename A> | ||||
795 | inline bool | ||||
796 | vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr) | ||||
797 | { | ||||
798 | if (v) | ||||
799 | return v->iterate (ix, ptr); | ||||
800 | else | ||||
801 | { | ||||
802 | *ptr = 0; | ||||
803 | return false; | ||||
804 | } | ||||
805 | } | ||||
806 | |||||
807 | |||||
808 | /* If V has no room for one more element, reallocate it. Then call | ||||
809 | V->quick_push(OBJ). */ | ||||
810 | template<typename T, typename A> | ||||
811 | inline T * | ||||
812 | vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO) | ||||
813 | { | ||||
814 | vec_safe_reserve (v, 1, false PASS_MEM_STAT); | ||||
815 | return v->quick_push (obj); | ||||
816 | } | ||||
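/* Illustrative sketch (editorial addition): the usual idiom for growing a
   GC-allocated vector through a possibly-NULL pointer.  vec_safe_push
   reserves space (allocating the vector on first use) and may move it,
   which is why the pointer is passed by reference.  `some_tree' and
   `other_tree' stand for arbitrary tree nodes.

     vec<tree, va_gc> *worklist = NULL;
     vec_safe_push (worklist, some_tree);    // allocates on first push
     vec_safe_push (worklist, other_tree);   // may reallocate and move
*/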
817 | |||||
818 | |||||
819 | /* If V has no room for one more element, reallocate it. Then call | ||||
820 | V->quick_insert(IX, OBJ). */ | ||||
821 | template<typename T, typename A> | ||||
822 | inline void | ||||
823 | vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj | ||||
824 | CXX_MEM_STAT_INFO) | ||||
825 | { | ||||
826 | vec_safe_reserve (v, 1, false PASS_MEM_STAT); | ||||
827 | v->quick_insert (ix, obj); | ||||
828 | } | ||||
829 | |||||
830 | |||||
831 | /* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */ | ||||
832 | template<typename T, typename A> | ||||
833 | inline void | ||||
834 | vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size) | ||||
835 | { | ||||
836 | if (v) | ||||
837 | v->truncate (size); | ||||
838 | } | ||||
839 | |||||
840 | |||||
841 | /* If SRC is not NULL, return a pointer to a copy of it. */ | ||||
842 | template<typename T, typename A> | ||||
843 | inline vec<T, A, vl_embed> * | ||||
844 | vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO) | ||||
845 | { | ||||
846 | return src ? src->copy (ALONE_PASS_MEM_STAT) : NULL; | ||||
847 | } | ||||
848 | |||||
849 | /* Copy the elements from SRC to the end of DST as if by memcpy. | ||||
850 | Reallocate DST, if necessary. */ | ||||
851 | template<typename T, typename A> | ||||
852 | inline void | ||||
853 | vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src | ||||
854 | CXX_MEM_STAT_INFO) | ||||
855 | { | ||||
856 | unsigned src_len = vec_safe_length (src); | ||||
857 | if (src_len) | ||||
858 | { | ||||
859 | vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len | ||||
860 | PASS_MEM_STAT); | ||||
861 | dst->splice (*src); | ||||
862 | } | ||||
863 | } | ||||
864 | |||||
865 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
866 | size of the vector and so should be used with care. */ | ||||
867 | |||||
868 | template<typename T, typename A> | ||||
869 | inline bool | ||||
870 | vec_safe_contains (vec<T, A, vl_embed> *v, const T &search) | ||||
871 | { | ||||
872 | return v ? v->contains (search) : false; | ||||
873 | } | ||||
874 | |||||
875 | /* Index into vector. Return the IX'th element. IX must be in the | ||||
876 | domain of the vector. */ | ||||
877 | |||||
878 | template<typename T, typename A> | ||||
879 | inline const T & | ||||
880 | vec<T, A, vl_embed>::operator[] (unsigned ix) const | ||||
881 | { | ||||
882 | gcc_checking_assert (ix < m_vecpfx.m_num); | ||||
883 | return address ()[ix]; | ||||
884 | } | ||||
885 | |||||
886 | template<typename T, typename A> | ||||
887 | inline T & | ||||
888 | vec<T, A, vl_embed>::operator[] (unsigned ix) | ||||
889 | { | ||||
890 | gcc_checking_assert (ix < m_vecpfx.m_num); | ||||
891 | return address ()[ix]; | ||||
892 | } | ||||
893 | |||||
894 | |||||
895 | /* Get the final element of the vector, which must not be empty. */ | ||||
896 | |||||
897 | template<typename T, typename A> | ||||
898 | inline T & | ||||
899 | vec<T, A, vl_embed>::last (void) | ||||
900 | { | ||||
901 | gcc_checking_assert (m_vecpfx.m_num > 0); | ||||
902 | return (*this)[m_vecpfx.m_num - 1]; | ||||
903 | } | ||||
904 | |||||
905 | |||||
906 | /* If this vector has space for NELEMS additional entries, return | ||||
907 | true. You usually only need to use this if you are doing your | ||||
908 | own vector reallocation, for instance on an embedded vector. This | ||||
909 | returns true in exactly the same circumstances that vec::reserve | ||||
910 | will. */ | ||||
911 | |||||
912 | template<typename T, typename A> | ||||
913 | inline bool | ||||
914 | vec<T, A, vl_embed>::space (unsigned nelems) const | ||||
915 | { | ||||
916 | return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems; | ||||
917 | } | ||||
918 | |||||
919 | |||||
920 | /* Return iteration condition and update *PTR to (a copy of) the IX'th | ||||
921 | element of this vector. Use this to iterate over the elements of a | ||||
922 | vector as follows, | ||||
923 | |||||
924 | for (ix = 0; v->iterate (ix, &val); ix++) | ||||
925 | continue; */ | ||||
926 | |||||
927 | template<typename T, typename A> | ||||
928 | inline bool | ||||
929 | vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const | ||||
930 | { | ||||
931 | if (ix < m_vecpfx.m_num) | ||||
932 | { | ||||
933 | *ptr = address ()[ix]; | ||||
934 | return true; | ||||
935 | } | ||||
936 | else | ||||
937 | { | ||||
938 | *ptr = 0; | ||||
939 | return false; | ||||
940 | } | ||||
941 | } | ||||
942 | |||||
943 | |||||
944 | /* Return iteration condition and update *PTR to point to the | ||||
945 | IX'th element of this vector. Use this to iterate over the | ||||
946 | elements of a vector as follows, | ||||
947 | |||||
948 | for (ix = 0; v->iterate (ix, &ptr); ix++) | ||||
949 | continue; | ||||
950 | |||||
951 | This variant is for vectors of objects. */ | ||||
952 | |||||
953 | template<typename T, typename A> | ||||
954 | inline bool | ||||
955 | vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const | ||||
956 | { | ||||
957 | if (ix < m_vecpfx.m_num) | ||||
958 | { | ||||
959 | *ptr = CONST_CAST (T *, &address ()[ix]); | ||||
960 | return true; | ||||
961 | } | ||||
962 | else | ||||
963 | { | ||||
964 | *ptr = 0; | ||||
965 | return false; | ||||
966 | } | ||||
967 | } | ||||
968 | |||||
969 | |||||
970 | /* Return a pointer to a copy of this vector. */ | ||||
971 | |||||
972 | template<typename T, typename A> | ||||
973 | inline vec<T, A, vl_embed> * | ||||
974 | vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const | ||||
975 | { | ||||
976 | vec<T, A, vl_embed> *new_vec = NULL; | ||||
977 | unsigned len = length (); | ||||
978 | if (len) | ||||
979 | { | ||||
980 | vec_alloc (new_vec, len PASS_MEM_STAT); | ||||
981 | new_vec->embedded_init (len, len); | ||||
982 | vec_copy_construct (new_vec->address (), address (), len); | ||||
983 | } | ||||
984 | return new_vec; | ||||
985 | } | ||||
986 | |||||
987 | |||||
988 | /* Copy the elements from SRC to the end of this vector as if by memcpy. | ||||
989 | The vector must have sufficient headroom available. */ | ||||
990 | |||||
991 | template<typename T, typename A> | ||||
992 | inline void | ||||
993 | vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src) | ||||
994 | { | ||||
995 | unsigned len = src.length (); | ||||
996 | if (len) | ||||
997 | { | ||||
998 | gcc_checking_assert (space (len)); | ||||
999 | vec_copy_construct (end (), src.address (), len); | ||||
1000 | m_vecpfx.m_num += len; | ||||
1001 | } | ||||
1002 | } | ||||
1003 | |||||
1004 | template<typename T, typename A> | ||||
1005 | inline void | ||||
1006 | vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src) | ||||
1007 | { | ||||
1008 | if (src) | ||||
1009 | splice (*src); | ||||
1010 | } | ||||
1011 | |||||
1012 | |||||
1013 | /* Push OBJ (a new element) onto the end of the vector. There must be | ||||
1014 | sufficient space in the vector. Return a pointer to the slot | ||||
1015 | where OBJ was inserted. */ | ||||
1016 | |||||
1017 | template<typename T, typename A> | ||||
1018 | inline T * | ||||
1019 | vec<T, A, vl_embed>::quick_push (const T &obj) | ||||
1020 | { | ||||
1021 | gcc_checking_assert (space (1)); | ||||
1022 | T *slot = &address ()[m_vecpfx.m_num++]; | ||||
1023 | *slot = obj; | ||||
1024 | return slot; | ||||
1025 | } | ||||
1026 | |||||
1027 | |||||
1028 | /* Pop and return the last element off the end of the vector. */ | ||||
1029 | |||||
1030 | template<typename T, typename A> | ||||
1031 | inline T & | ||||
1032 | vec<T, A, vl_embed>::pop (void) | ||||
1033 | { | ||||
1034 | gcc_checking_assert (length () > 0); | ||||
1035 | return address ()[--m_vecpfx.m_num]; | ||||
1036 | } | ||||
1037 | |||||
1038 | |||||
1039 | /* Set the length of the vector to SIZE. The new length must be less | ||||
1040 | than or equal to the current length. This is an O(1) operation. */ | ||||
1041 | |||||
1042 | template<typename T, typename A> | ||||
1043 | inline void | ||||
1044 | vec<T, A, vl_embed>::truncate (unsigned size) | ||||
1045 | { | ||||
1046 | gcc_checking_assert (length () >= size); | ||||
1047 | m_vecpfx.m_num = size; | ||||
1048 | } | ||||
1049 | |||||
1050 | |||||
1051 | /* Insert an element, OBJ, at the IXth position of this vector. There | ||||
1052 | must be sufficient space. */ | ||||
1053 | |||||
1054 | template<typename T, typename A> | ||||
1055 | inline void | ||||
1056 | vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj) | ||||
1057 | { | ||||
1058 | gcc_checking_assert (length () < allocated ()); | ||||
1059 | gcc_checking_assert (ix <= length ()); | ||||
1060 | T *slot = &address ()[ix]; | ||||
1061 | memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T)); | ||||
1062 | *slot = obj; | ||||
1063 | } | ||||
1064 | |||||
1065 | |||||
1066 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
1067 | remaining elements is preserved. This is an O(N) operation due to | ||||
1068 | memmove. */ | ||||
1069 | |||||
1070 | template<typename T, typename A> | ||||
1071 | inline void | ||||
1072 | vec<T, A, vl_embed>::ordered_remove (unsigned ix) | ||||
1073 | { | ||||
1074 | gcc_checking_assert (ix < length ()); | ||||
1075 | T *slot = &address ()[ix]; | ||||
1076 | memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T)); | ||||
1077 | } | ||||
1078 | |||||
1079 | |||||
1080 | /* Remove elements in [START, END) from VEC for which COND holds. Ordering of | ||||
1081 | remaining elements is preserved. This is an O(N) operation. */ | ||||
1082 | |||||
1083 | #define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index, \ | ||||
1084 | elem_ptr, start, end, cond) \ | ||||
1085 | { \ | ||||
1086 | gcc_assert ((end) <= (vec).length ()); \ | ||||
1087 | for (read_index = write_index = (start); read_index < (end); \ | ||||
1088 | ++read_index) \ | ||||
1089 | { \ | ||||
1090 | elem_ptr = &(vec)[read_index]; \ | ||||
1091 | bool remove_p = (cond); \ | ||||
1092 | if (remove_p) \ | ||||
1093 | continue; \ | ||||
1094 | \ | ||||
1095 | if (read_index != write_index) \ | ||||
1096 | (vec)[write_index] = (vec)[read_index]; \ | ||||
1097 | \ | ||||
1098 | write_index++; \ | ||||
1099 | } \ | ||||
1100 | \ | ||||
1101 | if (read_index - write_index > 0) \ | ||||
1102 | (vec).block_remove (write_index, read_index - write_index); \ | ||||
1103 | } | ||||
1104 | |||||
1105 | |||||
1106 | /* Remove elements from VEC for which COND holds. Ordering of remaining | ||||
1107 | elements is preserved. This is an O(N) operation. */ | ||||
1108 | |||||
1109 | #define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \ | ||||
1110 | cond) \ | ||||
1111 | VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \ | ||||
1112 | elem_ptr, 0, (vec).length (), (cond)) | ||||
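/* Illustrative sketch (editorial addition): removing all even elements from
   an int vector while preserving the relative order of the rest.  The index
   and pointer variables must be declared by the caller because the macro
   assigns to them; `v' stands for some vec<int>.

     unsigned rd, wr;
     int *slot;
     VEC_ORDERED_REMOVE_IF (v, rd, wr, slot, (*slot & 1) == 0);
*/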
1113 | |||||
1114 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
1115 | remaining elements is destroyed. This is an O(1) operation. */ | ||||
1116 | |||||
1117 | template<typename T, typename A> | ||||
1118 | inline void | ||||
1119 | vec<T, A, vl_embed>::unordered_remove (unsigned ix) | ||||
1120 | { | ||||
1121 | gcc_checking_assert (ix < length ()); | ||||
1122 | T *p = address (); | ||||
1123 | p[ix] = p[--m_vecpfx.m_num]; | ||||
1124 | } | ||||
1125 | |||||
1126 | |||||
1127 | /* Remove LEN elements starting at the IXth. Ordering is retained. | ||||
1128 | This is an O(N) operation due to memmove. */ | ||||
1129 | |||||
1130 | template<typename T, typename A> | ||||
1131 | inline void | ||||
1132 | vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len) | ||||
1133 | { | ||||
1134 | gcc_checking_assert (ix + len <= length ()); | ||||
1135 | T *slot = &address ()[ix]; | ||||
1136 | m_vecpfx.m_num -= len; | ||||
1137 | memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T)); | ||||
1138 | } | ||||
1139 | |||||
1140 | |||||
1141 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
1142 | function to pass to qsort. */ | ||||
1143 | |||||
1144 | template<typename T, typename A> | ||||
1145 | inline void | ||||
1146 | vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *)) | ||||
1147 | { | ||||
1148 | if (length () > 1) | ||||
1149 | gcc_qsort (address (), length (), sizeof (T), cmp); | ||||
1150 | } | ||||
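/* Illustrative sketch (editorial addition): qsort expects a classic
   three-way C comparison callback over untyped element pointers; the
   comparator below is just an example for a vec<int> `v'.

     static int
     cmp_int (const void *a_, const void *b_)
     {
       int a = *(const int *) a_, b = *(const int *) b_;
       return a < b ? -1 : a > b;
     }
     ...
     v.qsort (cmp_int);
*/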
1151 | |||||
1152 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
1153 | function to pass to qsort. */ | ||||
1154 | |||||
1155 | template<typename T, typename A> | ||||
1156 | inline void | ||||
1157 | vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *), | ||||
1158 | void *data) | ||||
1159 | { | ||||
1160 | if (length () > 1) | ||||
1161 | gcc_sort_r (address (), length (), sizeof (T), cmp, data); | ||||
1162 | } | ||||
1163 | |||||
1164 | /* Sort the contents of this vector with gcc_stablesort_r. CMP is the | ||||
1165 | comparison function to pass to qsort. */ | ||||
1166 | |||||
1167 | template<typename T, typename A> | ||||
1168 | inline void | ||||
1169 | vec<T, A, vl_embed>::stablesort (int (*cmp) (const void *, const void *, | ||||
1170 | void *), void *data) | ||||
1171 | { | ||||
1172 | if (length () > 1) | ||||
1173 | gcc_stablesort_r (address (), length (), sizeof (T), cmp, data); | ||||
1174 | } | ||||
1175 | |||||
1176 | /* Search the contents of the sorted vector with a binary search. | ||||
1177 | CMP is the comparison function to pass to bsearch. */ | ||||
1178 | |||||
1179 | template<typename T, typename A> | ||||
1180 | inline T * | ||||
1181 | vec<T, A, vl_embed>::bsearch (const void *key, | ||||
1182 | int (*compar) (const void *, const void *)) | ||||
1183 | { | ||||
1184 | const void *base = this->address (); | ||||
1185 | size_t nmemb = this->length (); | ||||
1186 | size_t size = sizeof (T); | ||||
1187 | /* The following is a copy of glibc stdlib-bsearch.h. */ | ||||
1188 | size_t l, u, idx; | ||||
1189 | const void *p; | ||||
1190 | int comparison; | ||||
1191 | |||||
1192 | l = 0; | ||||
1193 | u = nmemb; | ||||
1194 | while (l < u) | ||||
1195 | { | ||||
1196 | idx = (l + u) / 2; | ||||
1197 | p = (const void *) (((const char *) base) + (idx * size)); | ||||
1198 | comparison = (*compar) (key, p); | ||||
1199 | if (comparison < 0) | ||||
1200 | u = idx; | ||||
1201 | else if (comparison > 0) | ||||
1202 | l = idx + 1; | ||||
1203 | else | ||||
1204 | return (T *)const_cast<void *>(p); | ||||
1205 | } | ||||
1206 | |||||
1207 | return NULL; | ||||
1208 | } | ||||
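/* Illustrative sketch (editorial addition): bsearch assumes the vector is
   already sorted consistently with the comparator, whose first argument is
   always KEY.  `v' stands for some sorted vec<int>.

     static int
     cmp_key_int (const void *key, const void *elt)
     {
       int k = *(const int *) key, e = *(const int *) elt;
       return k < e ? -1 : k > e;
     }
     ...
     int key = 42;
     int *slot = v.bsearch (&key, cmp_key_int);   // NULL when not found
*/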
1209 | |||||
1210 | /* Search the contents of the sorted vector with a binary search. | ||||
1211 | CMP is the comparison function to pass to bsearch. */ | ||||
1212 | |||||
1213 | template<typename T, typename A> | ||||
1214 | inline T * | ||||
1215 | vec<T, A, vl_embed>::bsearch (const void *key, | ||||
1216 | int (*compar) (const void *, const void *, | ||||
1217 | void *), void *data) | ||||
1218 | { | ||||
1219 | const void *base = this->address (); | ||||
1220 | size_t nmemb = this->length (); | ||||
1221 | size_t size = sizeof (T); | ||||
1222 | /* The following is a copy of glibc stdlib-bsearch.h. */ | ||||
1223 | size_t l, u, idx; | ||||
1224 | const void *p; | ||||
1225 | int comparison; | ||||
1226 | |||||
1227 | l = 0; | ||||
1228 | u = nmemb; | ||||
1229 | while (l < u) | ||||
1230 | { | ||||
1231 | idx = (l + u) / 2; | ||||
1232 | p = (const void *) (((const char *) base) + (idx * size)); | ||||
1233 | comparison = (*compar) (key, p, data); | ||||
1234 | if (comparison < 0) | ||||
1235 | u = idx; | ||||
1236 | else if (comparison > 0) | ||||
1237 | l = idx + 1; | ||||
1238 | else | ||||
1239 | return (T *)const_cast<void *>(p); | ||||
1240 | } | ||||
1241 | |||||
1242 | return NULL; | ||||
1243 | } | ||||
1244 | |||||
1245 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
1246 | size of the vector and so should be used with care. */ | ||||
1247 | |||||
1248 | template<typename T, typename A> | ||||
1249 | inline bool | ||||
1250 | vec<T, A, vl_embed>::contains (const T &search) const | ||||
1251 | { | ||||
1252 | unsigned int len = length (); | ||||
1253 | const T *p = address (); | ||||
1254 | for (unsigned int i = 0; i < len; i++) | ||||
1255 | { | ||||
1256 | const T *slot = &p[i]; | ||||
1257 | if (*slot == search) | ||||
1258 | return true; | ||||
1259 | } | ||||
1260 | |||||
1261 | return false; | ||||
1262 | } | ||||
1263 | |||||
1264 | /* Find and return the first position in which OBJ could be inserted | ||||
1265 | without changing the ordering of this vector. LESSTHAN is a | ||||
1266 | function that returns true if the first argument is strictly less | ||||
1267 | than the second. */ | ||||
1268 | |||||
1269 | template<typename T, typename A> | ||||
1270 | unsigned | ||||
1271 | vec<T, A, vl_embed>::lower_bound (const T &obj, | ||||
1272 | bool (*lessthan)(const T &, const T &)) | ||||
1273 | const | ||||
1274 | { | ||||
1275 | unsigned int len = length (); | ||||
1276 | unsigned int half, middle; | ||||
1277 | unsigned int first = 0; | ||||
1278 | while (len > 0) | ||||
1279 | { | ||||
1280 | half = len / 2; | ||||
1281 | middle = first; | ||||
1282 | middle += half; | ||||
1283 | const T &middle_elem = address ()[middle]; | ||||
1284 | if (lessthan (middle_elem, obj)) | ||||
1285 | { | ||||
1286 | first = middle; | ||||
1287 | ++first; | ||||
1288 | len = len - half - 1; | ||||
1289 | } | ||||
1290 | else | ||||
1291 | len = half; | ||||
1292 | } | ||||
1293 | return first; | ||||
1294 | } | ||||
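/* Illustrative sketch (editorial addition): lower_bound performs the usual
   binary search for the first position where OBJ could be inserted without
   breaking the ordering imposed by LESSTHAN.  `v' stands for a sorted
   embedded vec<int> with spare capacity.

     static bool int_less (const int &a, const int &b) { return a < b; }
     ...
     unsigned pos = v->lower_bound (42, int_less);
     v->quick_insert (pos, 42);   // assumes space () was checked first
*/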
1295 | |||||
1296 | |||||
1297 | /* Return the number of bytes needed to embed an instance of an | ||||
1298 | embeddable vec inside another data structure. | ||||
1299 | |||||
1300 | Use these methods to determine the required size and initialization | ||||
1301 | of a vector V of type T embedded within another structure (as the | ||||
1302 | final member): | ||||
1303 | |||||
1304 | size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc); | ||||
1305 | void v->embedded_init (unsigned alloc, unsigned num); | ||||
1306 | |||||
1307 | These allow the caller to perform the memory allocation. */ | ||||
1308 | |||||
1309 | template<typename T, typename A> | ||||
1310 | inline size_t | ||||
1311 | vec<T, A, vl_embed>::embedded_size (unsigned alloc) | ||||
1312 | { | ||||
1313 | struct alignas (T) U { char data[sizeof (T)]; }; | ||||
1314 | typedef vec<U, A, vl_embed> vec_embedded; | ||||
1315 | typedef typename std::conditional<std::is_standard_layout<T>::value, | ||||
1316 | vec, vec_embedded>::type vec_stdlayout; | ||||
1317 | static_assert (sizeof (vec_stdlayout) == sizeof (vec), ""); | ||||
1318 | static_assert (alignof (vec_stdlayout) == alignof (vec), ""); | ||||
1319 | return sizeof (vec_stdlayout) + alloc * sizeof (T); | ||||
1320 | } | ||||
1321 | |||||
1322 | |||||
1323 | /* Initialize the vector to contain room for ALLOC elements and | ||||
1324 | NUM active elements. */ | ||||
1325 | |||||
1326 | template<typename T, typename A> | ||||
1327 | inline void | ||||
1328 | vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut) | ||||
1329 | { | ||||
1330 | m_vecpfx.m_alloc = alloc; | ||||
1331 | m_vecpfx.m_using_auto_storage = aut; | ||||
1332 | m_vecpfx.m_num = num; | ||||
1333 | } | ||||
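/* Illustrative sketch (editorial addition): as the comment above says, the
   caller performs the raw allocation and then initializes the prefix.  For
   a heap-backed embedded vector with room for 16 ints and no active
   elements yet, one possible sequence is:

     typedef vec<int, va_heap, vl_embed> int_vec;
     void *mem = xmalloc (int_vec::embedded_size (16));
     int_vec *v = static_cast<int_vec *> (mem);
     v->embedded_init (16, 0);
     v->quick_push (7);   // now safe: length 1 of 16
*/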
1334 | |||||
1335 | |||||
1336 | /* Grow the vector to a specific length. LEN must be as long or longer than | ||||
1337 | the current length. The new elements are uninitialized. */ | ||||
1338 | |||||
1339 | template<typename T, typename A> | ||||
1340 | inline void | ||||
1341 | vec<T, A, vl_embed>::quick_grow (unsigned len) | ||||
1342 | { | ||||
1343 | gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc); | ||||
1344 | m_vecpfx.m_num = len; | ||||
1345 | } | ||||
1346 | |||||
1347 | |||||
1348 | /* Grow the vector to a specific length. LEN must be as long or longer than | ||||
1349 | the current length. The new elements are initialized to zero. */ | ||||
1350 | |||||
1351 | template<typename T, typename A> | ||||
1352 | inline void | ||||
1353 | vec<T, A, vl_embed>::quick_grow_cleared (unsigned len) | ||||
1354 | { | ||||
1355 | unsigned oldlen = length (); | ||||
1356 | size_t growby = len - oldlen; | ||||
1357 | quick_grow (len); | ||||
1358 | if (growby != 0) | ||||
1359 | vec_default_construct (address () + oldlen, growby); | ||||
1360 | } | ||||
1361 | |||||
1362 | /* Garbage collection support for vec<T, A, vl_embed>. */ | ||||
1363 | |||||
1364 | template<typename T> | ||||
1365 | void | ||||
1366 | gt_ggc_mx (vec<T, va_gc> *v) | ||||
1367 | { | ||||
1368 | extern void gt_ggc_mx (T &); | ||||
1369 | for (unsigned i = 0; i < v->length (); i++) | ||||
1370 | gt_ggc_mx ((*v)[i]); | ||||
1371 | } | ||||
1372 | |||||
1373 | template<typename T> | ||||
1374 | void | ||||
1375 | gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED) | ||||
1376 | { | ||||
1377 | /* Nothing to do. Vectors of atomic types wrt GC do not need to | ||||
1378 | be traversed. */ | ||||
1379 | } | ||||
1380 | |||||
1381 | |||||
1382 | /* PCH support for vec<T, A, vl_embed>. */ | ||||
1383 | |||||
1384 | template<typename T, typename A> | ||||
1385 | void | ||||
1386 | gt_pch_nx (vec<T, A, vl_embed> *v) | ||||
1387 | { | ||||
1388 | extern void gt_pch_nx (T &); | ||||
1389 | for (unsigned i = 0; i < v->length (); i++) | ||||
1390 | gt_pch_nx ((*v)[i]); | ||||
1391 | } | ||||
1392 | |||||
1393 | template<typename T, typename A> | ||||
1394 | void | ||||
1395 | gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie) | ||||
1396 | { | ||||
1397 | for (unsigned i = 0; i < v->length (); i++) | ||||
1398 | op (&((*v)[i]), NULL, cookie); | ||||
1399 | } | ||||
1400 | |||||
1401 | template<typename T, typename A> | ||||
1402 | void | ||||
1403 | gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie) | ||||
1404 | { | ||||
1405 | extern void gt_pch_nx (T *, gt_pointer_operator, void *); | ||||
1406 | for (unsigned i = 0; i < v->length (); i++) | ||||
1407 | gt_pch_nx (&((*v)[i]), op, cookie); | ||||
1408 | } | ||||
1409 | |||||
1410 | |||||
1411 | /* Space efficient vector. These vectors can grow dynamically and are | ||||
1412 | allocated together with their control data. They are suited to be | ||||
1413 | included in data structures. Prior to initial allocation, they | ||||
1414 | only take a single word of storage. | ||||
1415 | |||||
1416 | These vectors are implemented as a pointer to an embeddable vector. | ||||
1417 | The semantics allow for this pointer to be NULL to represent empty | ||||
1418 | vectors. This way, empty vectors occupy minimal space in the | ||||
1419 | structure containing them. | ||||
1420 | |||||
1421 | Properties: | ||||
1422 | |||||
1423 | - The whole vector and control data are allocated in a single | ||||
1424 | contiguous block. | ||||
1425 | - The whole vector may be re-allocated. | ||||
1426 | - Vector data may grow and shrink. | ||||
1427 | - Access and manipulation requires a pointer test and | ||||
1428 | indirection. | ||||
1429 | - It requires 1 word of storage (prior to vector allocation). | ||||
1430 | |||||
1431 | |||||
1432 | Limitations: | ||||
1433 | |||||
1434 | These vectors must be PODs because they are stored in unions. | ||||
1435 | (http://en.wikipedia.org/wiki/Plain_old_data_structures). | ||||
1436 | As long as we use C++03, we cannot have constructors or | ||||
1437 | destructors in classes that are stored in unions. */ | ||||
1438 | |||||
1439 | template<typename T, size_t N = 0> | ||||
1440 | class auto_vec; | ||||
1441 | |||||
1442 | template<typename T> | ||||
1443 | struct vec<T, va_heap, vl_ptr> | ||||
1444 | { | ||||
1445 | public: | ||||
1446 | /* Default ctors to ensure triviality. Use value-initialization | ||||
1447 | (e.g., vec() or vec v{ };) or vNULL to create a zero-initialized | ||||
1448 | instance. */ | ||||
1449 | vec () = default; | ||||
1450 | vec (const vec &) = default; | ||||
1451 | /* Initialization from the generic vNULL. */ | ||||
1452 | vec (vnull): m_vec () { } | ||||
1453 | /* Same as default ctor: vec storage must be released manually. */ | ||||
1454 | ~vec () = default; | ||||
1455 | |||||
1456 | /* Defaulted same as copy ctor. */ | ||||
1457 | vec& operator= (const vec &) = default; | ||||
1458 | |||||
1459 | /* Prevent implicit conversion from auto_vec. Use auto_vec::to_vec() | ||||
1460 | instead. */ | ||||
1461 | template <size_t N> | ||||
1462 | vec (auto_vec<T, N> &) = delete; | ||||
1463 | |||||
1464 | template <size_t N> | ||||
1465 | void operator= (auto_vec<T, N> &) = delete; | ||||
1466 | |||||
1467 | /* Memory allocation and deallocation for the embedded vector. | ||||
1468 | Needed because we cannot have proper ctors/dtors defined. */ | ||||
1469 | void create (unsigned nelems CXX_MEM_STAT_INFO); | ||||
1470 | void release (void); | ||||
1471 | |||||
1472 | /* Vector operations. */ | ||||
1473 | bool exists (void) const | ||||
1474 | { return m_vec != NULL; } | ||||
1475 | |||||
1476 | bool is_empty (void) const | ||||
1477 | { return m_vec ? m_vec->is_empty () : true; } | ||||
1478 | |||||
1479 | unsigned allocated (void) const | ||||
1480 | { return m_vec ? m_vec->allocated () : 0; } | ||||
1481 | |||||
1482 | unsigned length (void) const | ||||
1483 | { return m_vec ? m_vec->length () : 0; } | ||||
1484 | |||||
1485 | T *address (void) | ||||
1486 | { return m_vec ? m_vec->address () : NULL; } | ||||
1487 | |||||
1488 | const T *address (void) const | ||||
1489 | { return m_vec ? m_vec->address () : NULL; } | ||||
1490 | |||||
1491 | T *begin () { return address (); } | ||||
1492 | const T *begin () const { return address (); } | ||||
1493 | T *end () { return begin () + length (); } | ||||
1494 | const T *end () const { return begin () + length (); } | ||||
1495 | const T &operator[] (unsigned ix) const | ||||
1496 | { return (*m_vec)[ix]; } | ||||
1497 | |||||
1498 | bool operator!=(const vec &other) const | ||||
1499 | { return !(*this == other); } | ||||
1500 | |||||
1501 | bool operator==(const vec &other) const | ||||
1502 | { return address () == other.address (); } | ||||
1503 | |||||
1504 | T &operator[] (unsigned ix) | ||||
1505 | { return (*m_vec)[ix]; } | ||||
1506 | |||||
1507 | T &last (void) | ||||
1508 | { return m_vec->last (); } | ||||
1509 | |||||
1510 | bool space (int nelems) const | ||||
1511 | { return m_vec ? m_vec->space (nelems) : nelems == 0; } | ||||
1512 | |||||
1513 | bool iterate (unsigned ix, T *p) const; | ||||
1514 | bool iterate (unsigned ix, T **p) const; | ||||
1515 | vec copy (ALONE_CXX_MEM_STAT_INFO) const; | ||||
1516 | bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1517 | bool reserve_exact (unsigned CXX_MEM_STAT_INFO); | ||||
1518 | void splice (const vec &); | ||||
1519 | void safe_splice (const vec & CXX_MEM_STAT_INFO); | ||||
1520 | T *quick_push (const T &); | ||||
1521 | T *safe_push (const T &CXX_MEM_STAT_INFO); | ||||
1522 | T &pop (void); | ||||
1523 | void truncate (unsigned); | ||||
1524 | void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1525 | void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1526 | void quick_grow (unsigned); | ||||
1527 | void quick_grow_cleared (unsigned); | ||||
1528 | void quick_insert (unsigned, const T &); | ||||
1529 | void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO); | ||||
1530 | void ordered_remove (unsigned); | ||||
1531 | void unordered_remove (unsigned); | ||||
1532 | void block_remove (unsigned, unsigned); | ||||
1533 | void qsort (int (*) (const void *, const void *)); | ||||
1534 | void sort (int (*) (const void *, const void *, void *), void *); | ||||
1535 | void stablesort (int (*) (const void *, const void *, void *), void *); | ||||
1536 | T *bsearch (const void *key, int (*compar)(const void *, const void *)); | ||||
1537 | T *bsearch (const void *key, | ||||
1538 | int (*compar)(const void *, const void *, void *), void *); | ||||
1539 | unsigned lower_bound (T, bool (*)(const T &, const T &)) const; | ||||
1540 | bool contains (const T &search) const; | ||||
1541 | void reverse (void); | ||||
1542 | |||||
1543 | bool using_auto_storage () const; | ||||
1544 | |||||
1545 | /* FIXME - This field should be private, but we need to cater to | ||||
1546 | compilers that have stricter notions of PODness for types. */ | ||||
1547 | vec<T, va_heap, vl_embed> *m_vec; | ||||
1548 | }; | ||||
1549 | |||||
1550 | |||||
1551 | /* auto_vec is a subclass of vec that automatically manages creating and | ||||
1552 | releasing the internal vector. If N is nonzero, the vector has N elements | ||||
1553 | of internal storage. The default is no internal storage; you probably only | ||||
1554 | want internal storage for vectors on the stack, because if the size of the | ||||
1555 | vector exceeds the internal storage, that space is wasted. | ||||
1556 | */ | ||||
1557 | template<typename T, size_t N /* = 0 */> | ||||
1558 | class auto_vec : public vec<T, va_heap> | ||||
1559 | { | ||||
1560 | public: | ||||
1561 | auto_vec () | ||||
1562 | { | ||||
1563 | m_auto.embedded_init (N, 0, 1); | ||||
1564 | /* ??? Instead of initializing m_vec from &m_auto directly use an | ||||
1565 | expression that avoids referring to a specific member of 'this' | ||||
1566 | to derail the -Wstringop-overflow diagnostic code, avoiding | ||||
1567 | the impression that data accesses are supposed to be to the | ||||
1568 | m_auto member storage. */ | ||||
1569 | size_t off = (char *) &m_auto - (char *) this; | ||||
1570 | this->m_vec = (vec<T, va_heap, vl_embed> *) ((char *) this + off); | ||||
1571 | } | ||||
1572 | |||||
1573 | auto_vec (size_t s CXX_MEM_STAT_INFO) | ||||
1574 | { | ||||
1575 | if (s > N) | ||||
1576 | { | ||||
1577 | this->create (s PASS_MEM_STAT); | ||||
1578 | return; | ||||
1579 | } | ||||
1580 | |||||
1581 | m_auto.embedded_init (N, 0, 1); | ||||
1582 | /* ??? See above. */ | ||||
1583 | size_t off = (char *) &m_auto - (char *) this; | ||||
1584 | this->m_vec = (vec<T, va_heap, vl_embed> *) ((char *) this + off); | ||||
1585 | } | ||||
1586 | |||||
1587 | ~auto_vec () | ||||
1588 | { | ||||
1589 | this->release (); | ||||
1590 | } | ||||
1591 | |||||
1592 | /* Explicitly convert to the base class. There is no conversion | ||||
1593 | from a const auto_vec because a copy of the returned vec can | ||||
1594 | be used to modify *THIS. | ||||
1595 | This is a legacy function not to be used in new code. */ | ||||
1596 | vec<T, va_heap> to_vec_legacy () { | ||||
1597 | return *static_cast<vec<T, va_heap> *>(this); | ||||
1598 | } | ||||
1599 | |||||
1600 | private: | ||||
1601 | vec<T, va_heap, vl_embed> m_auto; | ||||
1602 | unsigned char m_data[sizeof (T) * N]; | ||||
1603 | }; | ||||
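/* Illustrative sketch (editorial addition): a stack-local scratch vector
   that avoids heap traffic while it stays within its N inline slots and
   transparently spills to the heap beyond that.  The element type and the
   bound `n' are only examples.

     auto_vec<int, 8> scratch;
     for (int i = 0; i < n; ++i)
       scratch.safe_push (i);   // heap allocation only once n > 8
*/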
1604 | |||||
1605 | /* auto_vec is a sub class of vec whose storage is released when it is | ||||
1606 | destroyed. */ | ||||
1607 | template<typename T> | ||||
1608 | class auto_vec<T, 0> : public vec<T, va_heap> | ||||
1609 | { | ||||
1610 | public: | ||||
1611 | auto_vec () { this->m_vec = NULL; } | ||||
1612 | auto_vec (size_t n CXX_MEM_STAT_INFO) { this->create (n PASS_MEM_STAT); } | ||||
1613 | ~auto_vec () { this->release (); } | ||||
1614 | |||||
1615 | auto_vec (vec<T, va_heap>&& r) | ||||
1616 | { | ||||
1617 | gcc_assert (!r.using_auto_storage ()); | ||||
1618 | this->m_vec = r.m_vec; | ||||
1619 | r.m_vec = NULL; | ||||
1620 | } | ||||
1621 | |||||
1622 | auto_vec (auto_vec<T> &&r) | ||||
1623 | { | ||||
1624 | gcc_assert (!r.using_auto_storage ()); | ||||
1625 | this->m_vec = r.m_vec; | ||||
1626 | r.m_vec = NULL; | ||||
1627 | } | ||||
1628 | |||||
1629 | auto_vec& operator= (vec<T, va_heap>&& r) | ||||
1630 | { | ||||
1631 | if (this == &r) | ||||
1632 | return *this; | ||||
1633 | |||||
1634 | gcc_assert (!r.using_auto_storage ()); | ||||
1635 | this->release (); | ||||
1636 | this->m_vec = r.m_vec; | ||||
1637 | r.m_vec = NULL; | ||||
1638 | return *this; | ||||
1639 | } | ||||
1640 | |||||
1641 | auto_vec& operator= (auto_vec<T> &&r) | ||||
1642 | { | ||||
1643 | if (this == &r) | ||||
1644 | return *this; | ||||
1645 | |||||
1646 | gcc_assert (!r.using_auto_storage ()); | ||||
1647 | this->release (); | ||||
1648 | this->m_vec = r.m_vec; | ||||
1649 | r.m_vec = NULL; | ||||
1650 | return *this; | ||||
1651 | } | ||||
1652 | |||||
1653 | /* Explicitly convert to the base class. There is no conversion | ||||
1654 | from a const auto_vec because a copy of the returned vec can | ||||
1655 | be used to modify *THIS. | ||||
1656 | This is a legacy function not to be used in new code. */ | ||||
1657 | vec<T, va_heap> to_vec_legacy () { | ||||
1658 | return *static_cast<vec<T, va_heap> *>(this); | ||||
1659 | } | ||||
1660 | |||||
1661 | // You probably don't want to copy a vector, so these are deleted to prevent | ||||
1662 | // unintentional use. If you really need a copy of the vector's contents you | ||||
1663 | // can use copy (). | ||||
1664 | auto_vec(const auto_vec &) = delete; | ||||
1665 | auto_vec &operator= (const auto_vec &) = delete; | ||||
1666 | }; | ||||
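/* Illustrative sketch (editorial addition): the move constructor above lets
   a function hand ownership of a heap vec to an auto_vec in the caller,
   which then releases it automatically.  `build_worklist' is hypothetical.

     vec<int> build_worklist ();          // returns a heap-allocated vec
     ...
     auto_vec<int> owned (build_worklist ());
     // storage released when `owned' goes out of scope
*/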
1667 | |||||
1668 | |||||
1669 | /* Allocate heap memory for pointer V and create the internal vector | ||||
1670 | with space for NELEMS elements. If NELEMS is 0, the internal | ||||
1671 | vector is initialized to empty. */ | ||||
1672 | |||||
1673 | template<typename T> | ||||
1674 | inline void | ||||
1675 | vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO) | ||||
1676 | { | ||||
1677 | v = new vec<T>; | ||||
1678 | v->create (nelems PASS_MEM_STAT); | ||||
1679 | } | ||||
1680 | |||||
1681 | |||||
1682 | /* A subclass of auto_vec <char *> that frees all of its elements on | ||||
1683 | deletion. */ | ||||
1684 | |||||
1685 | class auto_string_vec : public auto_vec <char *> | ||||
1686 | { | ||||
1687 | public: | ||||
1688 | ~auto_string_vec (); | ||||
1689 | }; | ||||
1690 | |||||
1691 | /* A subclass of auto_vec <T *> that deletes all of its elements on | ||||
1692 | destruction. | ||||
1693 | |||||
1694 | This is a crude way for a vec to "own" the objects it points to | ||||
1695 | and clean up automatically. | ||||
1696 | |||||
1697 | For example, no attempt is made to delete elements when an item | ||||
1698 | within the vec is overwritten. | ||||
1699 | |||||
1700 | We can't rely on gnu::unique_ptr within a container, | ||||
1701 | since we can't rely on move semantics in C++98. */ | ||||
1702 | |||||
1703 | template <typename T> | ||||
1704 | class auto_delete_vec : public auto_vec <T *> | ||||
1705 | { | ||||
1706 | public: | ||||
1707 | auto_delete_vec () {} | ||||
1708 | auto_delete_vec (size_t s) : auto_vec <T *> (s) {} | ||||
1709 | |||||
1710 | ~auto_delete_vec (); | ||||
1711 | |||||
1712 | private: | ||||
1713 | DISABLE_COPY_AND_ASSIGN (auto_delete_vec); | ||||
1714 | }; | ||||
1715 | |||||
1716 | /* Conditionally allocate heap memory for VEC and its internal vector. */ | ||||
1717 | |||||
1718 | template<typename T> | ||||
1719 | inline void | ||||
1720 | vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO) | ||||
1721 | { | ||||
1722 | if (!vec) | ||||
1723 | vec_alloc (vec, nelems PASS_MEM_STAT); | ||||
1724 | } | ||||
1725 | |||||
1726 | |||||
1727 | /* Free the heap memory allocated by vector V and set it to NULL. */ | ||||
1728 | |||||
1729 | template<typename T> | ||||
1730 | inline void | ||||
1731 | vec_free (vec<T> *&v) | ||||
1732 | { | ||||
1733 | if (v == NULL) | ||||
1734 | return; | ||||
1735 | |||||
1736 | v->release (); | ||||
1737 | delete v; | ||||
1738 | v = NULL; | ||||
1739 | } | ||||
1740 | |||||
1741 | |||||
1742 | /* Return iteration condition and update PTR to point to the IX'th | ||||
1743 | element of this vector. Use this to iterate over the elements of a | ||||
1744 | vector as follows, | ||||
1745 | |||||
1746 | for (ix = 0; v.iterate (ix, &ptr); ix++) | ||||
1747 | continue; */ | ||||
1748 | |||||
1749 | template<typename T> | ||||
1750 | inline bool | ||||
1751 | vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const | ||||
1752 | { | ||||
1753 | if (m_vec) | ||||
1754 | return m_vec->iterate (ix, ptr); | ||||
1755 | else | ||||
1756 | { | ||||
1757 | *ptr = 0; | ||||
1758 | return false; | ||||
1759 | } | ||||
1760 | } | ||||
1761 | |||||
1762 | |||||
1763 | /* Return iteration condition and update *PTR to point to the | ||||
1764 | IX'th element of this vector. Use this to iterate over the | ||||
1765 | elements of a vector as follows, | ||||
1766 | |||||
1767 | for (ix = 0; v->iterate (ix, &ptr); ix++) | ||||
1768 | continue; | ||||
1769 | |||||
1770 | This variant is for vectors of objects. */ | ||||
1771 | |||||
1772 | template<typename T> | ||||
1773 | inline bool | ||||
1774 | vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const | ||||
1775 | { | ||||
1776 | if (m_vec) | ||||
1777 | return m_vec->iterate (ix, ptr); | ||||
1778 | else | ||||
1779 | { | ||||
1780 | *ptr = 0; | ||||
1781 | return false; | ||||
1782 | } | ||||
1783 | } | ||||
1784 | |||||
1785 | |||||
1786 | /* Convenience macro for forward iteration. */ | ||||
1787 | #define FOR_EACH_VEC_ELT(V, I, P) \ | ||||
1788 | for (I = 0; (V).iterate ((I), &(P)); ++(I)) | ||||
1789 | |||||
1790 | #define FOR_EACH_VEC_SAFE_ELT(V, I, P) \ | ||||
1791 | for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I)) | ||||
1792 | |||||
1793 | /* Likewise, but start from FROM rather than 0. */ | ||||
1794 | #define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \ | ||||
1795 | for (I = (FROM); (V).iterate ((I), &(P)); ++(I)) | ||||
1796 | |||||
1797 | /* Convenience macro for reverse iteration. */ | ||||
1798 | #define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \ | ||||
1799 | for (I = (V).length () - 1; \ | ||||
1800 | (V).iterate ((I), &(P)); \ | ||||
1801 | (I)--) | ||||
1802 | |||||
1803 | #define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \ | ||||
1804 | for (I = vec_safe_length (V) - 1; \ | ||||
1805 | vec_safe_iterate ((V), (I), &(P)); \ | ||||
1806 | (I)--) | ||||
1807 | |||||
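/* Not part of vec.h: a minimal sketch of forward iteration with the
   macros above.  The names (example_sum_elements, v, i, x) are
   hypothetical.  */

int
example_sum_elements (const vec<int> &v)
{
  unsigned i;
  int x;
  int sum = 0;

  /* Visits the elements in index order; X receives a copy of each.  */
  FOR_EACH_VEC_ELT (v, i, x)
    sum += x;

  return sum;
}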
1808 | /* auto_string_vec's dtor, freeing all contained strings, automatically | ||||
1809 | chaining up to ~auto_vec <char *>, which frees the internal buffer. */ | ||||
1810 | |||||
1811 | inline | ||||
1812 | auto_string_vec::~auto_string_vec () | ||||
1813 | { | ||||
1814 | int i; | ||||
1815 | char *str; | ||||
1816 | FOR_EACH_VEC_ELT (*this, i, str) | ||||
1817 | free (str); | ||||
1818 | } | ||||
1819 | |||||
1820 | /* auto_delete_vec's dtor, deleting all contained items, automatically | ||||
1821 | chaining up to ~auto_vec <T*>, which frees the internal buffer. */ | ||||
1822 | |||||
1823 | template <typename T> | ||||
1824 | inline | ||||
1825 | auto_delete_vec<T>::~auto_delete_vec () | ||||
1826 | { | ||||
1827 | int i; | ||||
1828 | T *item; | ||||
1829 | FOR_EACH_VEC_ELT (*this, i, item) | ||||
1830 | delete item; | ||||
1831 | } | ||||
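/* Not part of vec.h: a minimal sketch of auto_delete_vec owning
   heap-allocated objects.  The type WIDGET and the other names are
   hypothetical.  */

struct widget { int id; };

void
example_owning_vector (void)
{
  auto_delete_vec<widget> widgets;
  widgets.safe_push (new widget ());
  widgets.safe_push (new widget ());
  /* Both widgets are deleted by ~auto_delete_vec; the internal buffer
     is then freed by ~auto_vec.  */
}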
1832 | |||||
1833 | |||||
1834 | /* Return a copy of this vector. */ | ||||
1835 | |||||
1836 | template<typename T> | ||||
1837 | inline vec<T, va_heap, vl_ptr> | ||||
1838 | vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const | ||||
1839 | { | ||||
1840 | vec<T, va_heap, vl_ptr> new_vec{ }; | ||||
1841 | if (length ()) | ||||
1842 | new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT); | ||||
1843 | return new_vec; | ||||
1844 | } | ||||
1845 | |||||
1846 | |||||
1847 | /* Ensure that the vector has at least RESERVE slots available (if | ||||
1848 | EXACT is false), or exactly RESERVE slots available (if EXACT is | ||||
1849 | true). | ||||
1850 | |||||
1851 | This may create additional headroom if EXACT is false. | ||||
1852 | |||||
1853 | Note that this can cause the embedded vector to be reallocated. | ||||
1854 | Returns true iff reallocation actually occurred. */ | ||||
1855 | |||||
1856 | template<typename T> | ||||
1857 | inline bool | ||||
1858 | vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL) | ||||
1859 | { | ||||
1860 | if (space (nelems)) | ||||
1861 | return false; | ||||
1862 | |||||
1863 | /* For now, play a game with va_heap::reserve to hide our auto storage, | ||||
1864 | if any. This is necessary because va_heap::reserve does not know that | ||||
1865 | the embedded vector is in auto storage and therefore must not be freed. */ | ||||
1866 | vec<T, va_heap, vl_embed> *oldvec = m_vec; | ||||
1867 | unsigned int oldsize = 0; | ||||
1868 | bool handle_auto_vec = m_vec && using_auto_storage (); | ||||
1869 | if (handle_auto_vec) | ||||
1870 | { | ||||
1871 | m_vec = NULL; | ||||
1872 | oldsize = oldvec->length (); | ||||
1873 | nelems += oldsize; | ||||
1874 | } | ||||
1875 | |||||
1876 | va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT); | ||||
1877 | if (handle_auto_vec) | ||||
1878 | { | ||||
1879 | vec_copy_construct (m_vec->address (), oldvec->address (), oldsize); | ||||
1880 | m_vec->m_vecpfx.m_num = oldsize; | ||||
1881 | } | ||||
1882 | |||||
1883 | return true; | ||||
1884 | } | ||||
1885 | |||||
1886 | |||||
1887 | /* Ensure that this vector has exactly NELEMS slots available. This | ||||
1888 | will not create additional headroom. Note this can cause the | ||||
1889 | embedded vector to be reallocated. Returns true iff reallocation | ||||
1890 | actually occurred. */ | ||||
1891 | |||||
1892 | template<typename T> | ||||
1893 | inline bool | ||||
1894 | vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL) | ||||
1895 | { | ||||
1896 | return reserve (nelems, true PASS_MEM_STAT); | ||||
1897 | } | ||||
1898 | |||||
1899 | |||||
1900 | /* Create the internal vector and reserve NELEMS for it. This is | ||||
1901 | exactly like vec::reserve, but the internal vector is | ||||
1902 | unconditionally allocated from scratch. The old one, if it | ||||
1903 | existed, is lost. */ | ||||
1904 | |||||
1905 | template<typename T> | ||||
1906 | inline void | ||||
1907 | vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL) | ||||
1908 | { | ||||
1909 | m_vec = NULL; | ||||
1910 | if (nelems > 0) | ||||
1911 | reserve_exact (nelems PASS_MEM_STAT); | ||||
1912 | } | ||||
1913 | |||||
1914 | |||||
1915 | /* Free the memory occupied by the embedded vector. */ | ||||
1916 | |||||
1917 | template<typename T> | ||||
1918 | inline void | ||||
1919 | vec<T, va_heap, vl_ptr>::release (void) | ||||
1920 | { | ||||
1921 | if (!m_vec) | ||||
1922 | return; | ||||
1923 | |||||
1924 | if (using_auto_storage ()) | ||||
1925 | { | ||||
1926 | m_vec->m_vecpfx.m_num = 0; | ||||
1927 | return; | ||||
1928 | } | ||||
1929 | |||||
1930 | va_heap::release (m_vec); | ||||
1931 | } | ||||
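/* Not part of vec.h: a minimal sketch of the by-value usage pattern
   for a heap vector, which must be released explicitly (auto_vec does
   this automatically).  The names are hypothetical.  */

void
example_value_vec (void)
{
  vec<int> v{ };
  v.safe_push (7);
  v.release ();   /* Frees the heap buffer; V is empty afterwards.  */
}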
1932 | |||||
1933 | /* Copy the elements from SRC to the end of this vector as if by memcpy. | ||||
1934 | SRC and this vector must be allocated with the same memory | ||||
1935 | allocation mechanism. This vector is assumed to have sufficient | ||||
1936 | headroom available. */ | ||||
1937 | |||||
1938 | template<typename T> | ||||
1939 | inline void | ||||
1940 | vec<T, va_heap, vl_ptr>::splice (const vec<T, va_heap, vl_ptr> &src) | ||||
1941 | { | ||||
1942 | if (src.length ()) | ||||
1943 | m_vec->splice (*(src.m_vec)); | ||||
1944 | } | ||||
1945 | |||||
1946 | |||||
1947 | /* Copy the elements in SRC to the end of this vector as if by memcpy. | ||||
1948 | SRC and this vector must be allocated with the same mechanism. | ||||
1949 | If there is not enough headroom in this vector, it will be reallocated | ||||
1950 | as needed. */ | ||||
1951 | |||||
1952 | template<typename T> | ||||
1953 | inline void | ||||
1954 | vec<T, va_heap, vl_ptr>::safe_splice (const vec<T, va_heap, vl_ptr> &src | ||||
1955 | MEM_STAT_DECL) | ||||
1956 | { | ||||
1957 | if (src.length ()) | ||||
1958 | { | ||||
1959 | reserve_exact (src.length ()); | ||||
1960 | splice (src); | ||||
1961 | } | ||||
1962 | } | ||||
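/* Not part of vec.h: a minimal sketch of appending one vector to
   another.  The names are hypothetical.  */

void
example_append (vec<int> &dst, const vec<int> &src)
{
  /* Reallocates DST as needed, then copies SRC's elements to its end.  */
  dst.safe_splice (src);
}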
1963 | |||||
1964 | |||||
1965 | /* Push OBJ (a new element) onto the end of the vector. There must be | ||||
1966 | sufficient space in the vector. Return a pointer to the slot | ||||
1967 | where OBJ was inserted. */ | ||||
1968 | |||||
1969 | template<typename T> | ||||
1970 | inline T * | ||||
1971 | vec<T, va_heap, vl_ptr>::quick_push (const T &obj) | ||||
1972 | { | ||||
1973 | return m_vec->quick_push (obj); | ||||
1974 | } | ||||
1975 | |||||
1976 | |||||
1977 | /* Push a new element OBJ onto the end of this vector. Reallocates | ||||
1978 | the embedded vector, if needed. Return a pointer to the slot where | ||||
1979 | OBJ was inserted. */ | ||||
1980 | |||||
1981 | template<typename T> | ||||
1982 | inline T * | ||||
1983 | vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL) | ||||
1984 | { | ||||
1985 | reserve (1, false PASS_MEM_STAT); | ||||
1986 | return quick_push (obj); | ||||
1987 | } | ||||
1988 | |||||
1989 | |||||
1990 | /* Pop and return the last element off the end of the vector. */ | ||||
1991 | |||||
1992 | template<typename T> | ||||
1993 | inline T & | ||||
1994 | vec<T, va_heap, vl_ptr>::pop (void) | ||||
1995 | { | ||||
1996 | return m_vec->pop (); | ||||
1997 | } | ||||
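/* Not part of vec.h: a minimal sketch contrasting the quick_* and
   safe_* push operations.  The names are hypothetical.  */

void
example_push_pop (vec<int> &v)
{
  /* Reserve headroom first, then use the non-reallocating variant.  */
  v.reserve (2);
  v.quick_push (1);
  v.quick_push (2);

  /* safe_push reserves on its own when needed.  */
  v.safe_push (3);

  int last = v.pop ();   /* Removes and returns 3.  */
  (void) last;
}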
1998 | |||||
1999 | |||||
2000 | /* Set the length of the vector to SIZE. The new length must be less | ||||
2001 | than or equal to the current length. This is an O(1) operation. */ | ||||
2002 | |||||
2003 | template<typename T> | ||||
2004 | inline void | ||||
2005 | vec<T, va_heap, vl_ptr>::truncate (unsigned size) | ||||
2006 | { | ||||
2007 | if (m_vec) | ||||
2008 | m_vec->truncate (size); | ||||
2009 | else | ||||
2010 | gcc_checking_assert (size == 0); | ||||
2011 | } | ||||
2012 | |||||
2013 | |||||
2014 | /* Grow the vector to a specific length. LEN must be as long or | ||||
2015 | longer than the current length. The new elements are | ||||
2016 | uninitialized. Reallocate the internal vector, if needed. */ | ||||
2017 | |||||
2018 | template<typename T> | ||||
2019 | inline void | ||||
2020 | vec<T, va_heap, vl_ptr>::safe_grow (unsigned len, bool exact MEM_STAT_DECL) | ||||
2021 | { | ||||
2022 | unsigned oldlen = length (); | ||||
2023 | gcc_checking_assert (oldlen <= len); | ||||
2024 | reserve (len - oldlen, exact PASS_MEM_STAT); | ||||
2025 | if (m_vec) | ||||
2026 | m_vec->quick_grow (len); | ||||
2027 | else | ||||
2028 | gcc_checking_assert (len == 0); | ||||
2029 | } | ||||
2030 | |||||
2031 | |||||
2032 | /* Grow the embedded vector to a specific length. LEN must be as | ||||
2033 | long or longer than the current length. The new elements are | ||||
2034 | initialized to zero. Reallocate the internal vector, if needed. */ | ||||
2035 | |||||
2036 | template<typename T> | ||||
2037 | inline void | ||||
2038 | vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len, bool exact | ||||
2039 | MEM_STAT_DECL) | ||||
2040 | { | ||||
2041 | unsigned oldlen = length (); | ||||
2042 | size_t growby = len - oldlen; | ||||
2043 | safe_grow (len, exact PASS_MEM_STAT); | ||||
2044 | if (growby != 0) | ||||
2045 | vec_default_construct (address () + oldlen, growby); | ||||
2046 | } | ||||
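/* Not part of vec.h: a minimal sketch of growing and shrinking the
   vector length.  The names are hypothetical.  */

void
example_grow_truncate (vec<int> &v)
{
  unsigned old_len = v.length ();

  /* Append four zero-initialized elements.  */
  v.safe_grow_cleared (old_len + 4);

  /* Drop them again; truncate is O(1) and never reallocates.  */
  v.truncate (old_len);
}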
2047 | |||||
2048 | |||||
2049 | /* Same as vec::safe_grow but without reallocation of the internal vector. | ||||
2050 | If the vector cannot be extended, a runtime assertion will be triggered. */ | ||||
2051 | |||||
2052 | template<typename T> | ||||
2053 | inline void | ||||
2054 | vec<T, va_heap, vl_ptr>::quick_grow (unsigned len) | ||||
2055 | { | ||||
2056 | gcc_checking_assert (m_vec); | ||||
2057 | m_vec->quick_grow (len); | ||||
2058 | } | ||||
2059 | |||||
2060 | |||||
2061 | /* Same as vec::safe_grow_cleared but without reallocation of the | ||||
2062 | internal vector. If the vector cannot be extended, a runtime | ||||
2063 | assertion will be triggered. */ | ||||
2064 | |||||
2065 | template<typename T> | ||||
2066 | inline void | ||||
2067 | vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len) | ||||
2068 | { | ||||
2069 | gcc_checking_assert (m_vec); | ||||
2070 | m_vec->quick_grow_cleared (len); | ||||
2071 | } | ||||
2072 | |||||
2073 | |||||
2074 | /* Insert an element, OBJ, at the IXth position of this vector. There | ||||
2075 | must be sufficient space. */ | ||||
2076 | |||||
2077 | template<typename T> | ||||
2078 | inline void | ||||
2079 | vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj) | ||||
2080 | { | ||||
2081 | m_vec->quick_insert (ix, obj); | ||||
2082 | } | ||||
2083 | |||||
2084 | |||||
2085 | /* Insert an element, OBJ, at the IXth position of the vector. | ||||
2086 | Reallocate the embedded vector, if necessary. */ | ||||
2087 | |||||
2088 | template<typename T> | ||||
2089 | inline void | ||||
2090 | vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL) | ||||
2091 | { | ||||
2092 | reserve (1, false PASS_MEM_STAT); | ||||
2093 | quick_insert (ix, obj); | ||||
2094 | } | ||||
2095 | |||||
2096 | |||||
2097 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
2098 | remaining elements is preserved. This is an O(N) operation due to | ||||
2099 | a memmove. */ | ||||
2100 | |||||
2101 | template<typename T> | ||||
2102 | inline void | ||||
2103 | vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix) | ||||
2104 | { | ||||
2105 | m_vec->ordered_remove (ix); | ||||
2106 | } | ||||
2107 | |||||
2108 | |||||
2109 | /* Remove an element from the IXth position of this vector. Ordering | ||||
2110 | of remaining elements is destroyed. This is an O(1) operation. */ | ||||
2111 | |||||
2112 | template<typename T> | ||||
2113 | inline void | ||||
2114 | vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix) | ||||
2115 | { | ||||
2116 | m_vec->unordered_remove (ix); | ||||
2117 | } | ||||
2118 | |||||
2119 | |||||
2120 | /* Remove LEN elements starting at the IXth. Ordering is retained. | ||||
2121 | This is an O(N) operation due to memmove. */ | ||||
2122 | |||||
2123 | template<typename T> | ||||
2124 | inline void | ||||
2125 | vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len) | ||||
2126 | { | ||||
2127 | m_vec->block_remove (ix, len); | ||||
2128 | } | ||||
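/* Not part of vec.h: a minimal sketch of the trade-off between the
   removal operations above, assuming the vector holds at least two
   elements.  The names are hypothetical.  */

void
example_remove_front (vec<int> &v)
{
  if (v.length () < 2)
    return;

  /* Keeps the relative order of the remaining elements; O(N) memmove.  */
  v.ordered_remove (0);

  /* Moves the last element into slot 0 instead; O(1) but loses order.  */
  v.unordered_remove (0);
}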
2129 | |||||
2130 | |||||
2131 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
2132 | function to pass to qsort. */ | ||||
2133 | |||||
2134 | template<typename T> | ||||
2135 | inline void | ||||
2136 | vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *)) | ||||
2137 | { | ||||
2138 | if (m_vec) | ||||
2139 | m_vec->qsort (cmp); | ||||
2140 | } | ||||
2141 | |||||
2142 | /* Sort the contents of this vector with qsort. CMP is the three-argument | ||||
2143 | comparison function; DATA is passed to it as its final argument. */ | ||||
2144 | |||||
2145 | template<typename T> | ||||
2146 | inline void | ||||
2147 | vec<T, va_heap, vl_ptr>::sort (int (*cmp) (const void *, const void *, | ||||
2148 | void *), void *data) | ||||
2149 | { | ||||
2150 | if (m_vec) | ||||
2151 | m_vec->sort (cmp, data); | ||||
2152 | } | ||||
2153 | |||||
2154 | /* Sort the contents of this vector stably with gcc_stablesort_r. CMP is | ||||
2155 | the comparison function to pass to it; DATA is its final argument. */ | ||||
2156 | |||||
2157 | template<typename T> | ||||
2158 | inline void | ||||
2159 | vec<T, va_heap, vl_ptr>::stablesort (int (*cmp) (const void *, const void *, | ||||
2160 | void *), void *data) | ||||
2161 | { | ||||
2162 | if (m_vec) | ||||
2163 | m_vec->stablesort (cmp, data); | ||||
2164 | } | ||||
2165 | |||||
2166 | /* Search the contents of the sorted vector with a binary search. | ||||
2167 | CMP is the comparison function to pass to bsearch. */ | ||||
2168 | |||||
2169 | template<typename T> | ||||
2170 | inline T * | ||||
2171 | vec<T, va_heap, vl_ptr>::bsearch (const void *key, | ||||
2172 | int (*cmp) (const void *, const void *)) | ||||
2173 | { | ||||
2174 | if (m_vec) | ||||
2175 | return m_vec->bsearch (key, cmp); | ||||
2176 | return NULL; | ||||
2177 | } | ||||
2178 | |||||
2179 | /* Search the contents of the sorted vector with a binary search. | ||||
2180 | CMP is the comparison function to pass to bsearch. */ | ||||
2181 | |||||
2182 | template<typename T> | ||||
2183 | inline T * | ||||
2184 | vec<T, va_heap, vl_ptr>::bsearch (const void *key, | ||||
2185 | int (*cmp) (const void *, const void *, | ||||
2186 | void *), void *data) | ||||
2187 | { | ||||
2188 | if (m_vec) | ||||
2189 | return m_vec->bsearch (key, cmp, data); | ||||
2190 | return NULL; | ||||
2191 | } | ||||
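/* Not part of vec.h: a minimal sketch of sorting and then binary
   searching a vector with a qsort-style comparator.  The names
   (cmp_int, example_sort_and_search) are hypothetical.  */

static int
cmp_int (const void *a, const void *b)
{
  int ia = *(const int *) a;
  int ib = *(const int *) b;
  return ia < ib ? -1 : ia > ib;
}

int *
example_sort_and_search (vec<int> &v, int key)
{
  v.qsort (cmp_int);
  /* Returns a pointer to a matching element, or NULL when the key is
     absent or the vector is empty.  */
  return v.bsearch (&key, cmp_int);
}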
2192 | |||||
2193 | |||||
2194 | /* Find and return the first position in which OBJ could be inserted | ||||
2195 | without changing the ordering of this vector. LESSTHAN is a | ||||
2196 | function that returns true if the first argument is strictly less | ||||
2197 | than the second. */ | ||||
2198 | |||||
2199 | template<typename T> | ||||
2200 | inline unsigned | ||||
2201 | vec<T, va_heap, vl_ptr>::lower_bound (T obj, | ||||
2202 | bool (*lessthan)(const T &, const T &)) | ||||
2203 | const | ||||
2204 | { | ||||
2205 | return m_vec ? m_vec->lower_bound (obj, lessthan) : 0; | ||||
2206 | } | ||||
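/* Not part of vec.h: a minimal sketch of lower_bound on a vector that
   is assumed to be kept in ascending order.  The names are
   hypothetical.  */

static bool
int_less (const int &a, const int &b)
{
  return a < b;
}

unsigned
example_insert_position (const vec<int> &sorted, int value)
{
  /* The first index at which VALUE could be inserted without breaking
     the ordering.  */
  return sorted.lower_bound (value, int_less);
}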
2207 | |||||
2208 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
2209 | size of the vector and so should be used with care. */ | ||||
2210 | |||||
2211 | template<typename T> | ||||
2212 | inline bool | ||||
2213 | vec<T, va_heap, vl_ptr>::contains (const T &search) const | ||||
2214 | { | ||||
2215 | return m_vec ? m_vec->contains (search) : false; | ||||
2216 | } | ||||
2217 | |||||
2218 | /* Reverse the contents of the vector. */ | ||||
2219 | |||||
2220 | template<typename T> | ||||
2221 | inline void | ||||
2222 | vec<T, va_heap, vl_ptr>::reverse (void) | ||||
2223 | { | ||||
2224 | unsigned l = length (); | ||||
2225 | T *ptr = address (); | ||||
2226 | |||||
2227 | for (unsigned i = 0; i < l / 2; i++) | ||||
2228 | std::swap (ptr[i], ptr[l - i - 1]); | ||||
2229 | } | ||||
2230 | |||||
2231 | template<typename T> | ||||
2232 | inline bool | ||||
2233 | vec<T, va_heap, vl_ptr>::using_auto_storage () const | ||||
2234 | { | ||||
2235 | return m_vec ? m_vec->m_vecpfx.m_using_auto_storage : false; | ||||
2236 | } | ||||
2237 | |||||
2238 | /* Call release on each element vector of VEC, then release VEC itself. */ | ||||
2239 | |||||
2240 | template<typename T> | ||||
2241 | inline void | ||||
2242 | release_vec_vec (vec<vec<T> > &vec) | ||||
2243 | { | ||||
2244 | for (unsigned i = 0; i < vec.length (); i++) | ||||
2245 | vec[i].release (); | ||||
2246 | |||||
2247 | vec.release (); | ||||
2248 | } | ||||
2249 | |||||
2250 | // Provide a subset of the std::span functionality. (We can't use std::span | ||||
2251 | // itself because it's a C++20 feature.) | ||||
2252 | // | ||||
2253 | // In addition, provide an invalid value that is distinct from all valid | ||||
2254 | // sequences (including the empty sequence). This can be used to return | ||||
2255 | // failure without having to use std::optional. | ||||
2256 | // | ||||
2257 | // There is no operator bool because it would be ambiguous whether it is | ||||
2258 | // testing for a valid value or an empty sequence. | ||||
2259 | template<typename T> | ||||
2260 | class array_slice | ||||
2261 | { | ||||
2262 | template<typename OtherT> friend class array_slice; | ||||
2263 | |||||
2264 | public: | ||||
2265 | using value_type = T; | ||||
2266 | using iterator = T *; | ||||
2267 | using const_iterator = const T *; | ||||
2268 | |||||
2269 | array_slice () : m_base (nullptr), m_size (0) {} | ||||
2270 | |||||
2271 | template<typename OtherT> | ||||
2272 | array_slice (array_slice<OtherT> other) | ||||
2273 | : m_base (other.m_base), m_size (other.m_size) {} | ||||
2274 | |||||
2275 | array_slice (iterator base, unsigned int size) | ||||
2276 | : m_base (base), m_size (size) {} | ||||
2277 | |||||
2278 | template<size_t N> | ||||
2279 | array_slice (T (&array)[N]) : m_base (array), m_size (N) {} | ||||
2280 | |||||
2281 | template<typename OtherT> | ||||
2282 | array_slice (const vec<OtherT> &v) | ||||
2283 | : m_base (v.address ()), m_size (v.length ()) {} | ||||
2284 | |||||
2285 | template<typename OtherT> | ||||
2286 | array_slice (vec<OtherT> &v) | ||||
2287 | : m_base (v.address ()), m_size (v.length ()) {} | ||||
2288 | |||||
2289 | template<typename OtherT> | ||||
2290 | array_slice (const vec<OtherT, va_gc> *v) | ||||
2291 | : m_base (v ? v->address () : nullptr), m_size (v ? v->length () : 0) {} | ||||
2292 | |||||
2293 | template<typename OtherT> | ||||
2294 | array_slice (vec<OtherT, va_gc> *v) | ||||
2295 | : m_base (v ? v->address () : nullptr), m_size (v ? v->length () : 0) {} | ||||
2296 | |||||
2297 | iterator begin () { return m_base; } | ||||
2298 | iterator end () { return m_base + m_size; } | ||||
2299 | |||||
2300 | const_iterator begin () const { return m_base; } | ||||
2301 | const_iterator end () const { return m_base + m_size; } | ||||
2302 | |||||
2303 | value_type &front (); | ||||
2304 | value_type &back (); | ||||
2305 | value_type &operator[] (unsigned int i); | ||||
2306 | |||||
2307 | const value_type &front () const; | ||||
2308 | const value_type &back () const; | ||||
2309 | const value_type &operator[] (unsigned int i) const; | ||||
2310 | |||||
2311 | size_t size () const { return m_size; } | ||||
2312 | size_t size_bytes () const { return m_size * sizeof (T); } | ||||
2313 | bool empty () const { return m_size == 0; } | ||||
2314 | |||||
2315 | // An invalid array_slice that represents a failed operation. This is | ||||
2316 | // distinct from an empty slice, which is a valid result in some contexts. | ||||
2317 | static array_slice invalid () { return { nullptr, ~0U }; } | ||||
2318 | |||||
2319 | // True if the array is valid, false if it is an array like INVALID. | ||||
2320 | bool is_valid () const { return m_base || m_size == 0; } | ||||
2321 | |||||
2322 | private: | ||||
2323 | iterator m_base; | ||||
2324 | unsigned int m_size; | ||||
2325 | }; | ||||
2326 | |||||
2327 | template<typename T> | ||||
2328 | inline typename array_slice<T>::value_type & | ||||
2329 | array_slice<T>::front () | ||||
2330 | { | ||||
2331 | gcc_checking_assert (m_size); | ||||
2332 | return m_base[0]; | ||||
2333 | } | ||||
2334 | |||||
2335 | template<typename T> | ||||
2336 | inline const typename array_slice<T>::value_type & | ||||
2337 | array_slice<T>::front () const | ||||
2338 | { | ||||
2339 | gcc_checking_assert (m_size); | ||||
2340 | return m_base[0]; | ||||
2341 | } | ||||
2342 | |||||
2343 | template<typename T> | ||||
2344 | inline typename array_slice<T>::value_type & | ||||
2345 | array_slice<T>::back () | ||||
2346 | { | ||||
2347 | gcc_checking_assert (m_size); | ||||
2348 | return m_base[m_size - 1]; | ||||
2349 | } | ||||
2350 | |||||
2351 | template<typename T> | ||||
2352 | inline const typename array_slice<T>::value_type & | ||||
2353 | array_slice<T>::back () const | ||||
2354 | { | ||||
2355 | gcc_checking_assert (m_size); | ||||
2356 | return m_base[m_size - 1]; | ||||
2357 | } | ||||
2358 | |||||
2359 | template<typename T> | ||||
2360 | inline typename array_slice<T>::value_type & | ||||
2361 | array_slice<T>::operator[] (unsigned int i) | ||||
2362 | { | ||||
2363 | gcc_checking_assert (i < m_size); | ||||
2364 | return m_base[i]; | ||||
2365 | } | ||||
2366 | |||||
2367 | template<typename T> | ||||
2368 | inline const typename array_slice<T>::value_type & | ||||
2369 | array_slice<T>::operator[] (unsigned int i) const | ||||
2370 | { | ||||
2371 | gcc_checking_assert (i < m_size); | ||||
2372 | return m_base[i]; | ||||
2373 | } | ||||
2374 | |||||
2375 | template<typename T> | ||||
2376 | array_slice<T> | ||||
2377 | make_array_slice (T *base, unsigned int size) | ||||
2378 | { | ||||
2379 | return array_slice<T> (base, size); | ||||
2380 | } | ||||
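/* Not part of vec.h: a minimal sketch of viewing existing storage
   through an array_slice and checking for the invalid marker.  The
   names are hypothetical.  */

int
example_slice_sum (array_slice<const int> values)
{
  /* An invalid slice signals failure; an empty slice is a valid result.  */
  if (!values.is_valid ())
    return -1;

  int sum = 0;
  for (const int &value : values)
    sum += value;
  return sum;
}

/* A caller might build the slice from a plain array:

     int digits[3] = { 1, 2, 3 };
     int total = example_slice_sum (make_array_slice (digits, 3));  */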
2381 | |||||
2382 | #if (GCC_VERSION >= 3000) | ||||
2383 | # pragma GCC poison m_vec m_vecpfx m_vecdata | ||||
2384 | #endif | ||||
2385 | |||||
2386 | #endif // GCC_VEC_H |