File: | build/gcc/stor-layout.cc |
Warning: | line 213, column 7 Called C++ object pointer is null |
Press '?' to see keyboard shortcuts
Keyboard shortcuts:
1 | /* C-compiler utilities for types and variables storage layout | |||
2 | Copyright (C) 1987-2023 Free Software Foundation, Inc. | |||
3 | ||||
4 | This file is part of GCC. | |||
5 | ||||
6 | GCC is free software; you can redistribute it and/or modify it under | |||
7 | the terms of the GNU General Public License as published by the Free | |||
8 | Software Foundation; either version 3, or (at your option) any later | |||
9 | version. | |||
10 | ||||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |||
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |||
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |||
14 | for more details. | |||
15 | ||||
16 | You should have received a copy of the GNU General Public License | |||
17 | along with GCC; see the file COPYING3. If not see | |||
18 | <http://www.gnu.org/licenses/>. */ | |||
19 | ||||
20 | ||||
21 | #include "config.h" | |||
22 | #include "system.h" | |||
23 | #include "coretypes.h" | |||
24 | #include "target.h" | |||
25 | #include "function.h" | |||
26 | #include "rtl.h" | |||
27 | #include "tree.h" | |||
28 | #include "memmodel.h" | |||
29 | #include "tm_p.h" | |||
30 | #include "stringpool.h" | |||
31 | #include "regs.h" | |||
32 | #include "emit-rtl.h" | |||
33 | #include "cgraph.h" | |||
34 | #include "diagnostic-core.h" | |||
35 | #include "fold-const.h" | |||
36 | #include "stor-layout.h" | |||
37 | #include "varasm.h" | |||
38 | #include "print-tree.h" | |||
39 | #include "langhooks.h" | |||
40 | #include "tree-inline.h" | |||
41 | #include "dumpfile.h" | |||
42 | #include "gimplify.h" | |||
43 | #include "attribs.h" | |||
44 | #include "debug.h" | |||
45 | #include "calls.h" | |||
46 | ||||
47 | /* Data type for the expressions representing sizes of data types. | |||
48 | It is the first integer type laid out. */ | |||
49 | tree sizetype_tab[(int) stk_type_kind_last]; | |||
50 | ||||
51 | /* If nonzero, this is an upper limit on alignment of structure fields. | |||
52 | The value is measured in bits. */ | |||
53 | unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT0 * BITS_PER_UNIT(8); | |||
54 | ||||
55 | static tree self_referential_size (tree); | |||
56 | static void finalize_record_size (record_layout_info); | |||
57 | static void finalize_type_size (tree); | |||
58 | static void place_union_field (record_layout_info, tree); | |||
59 | static int excess_unit_span (HOST_WIDE_INTlong, HOST_WIDE_INTlong, HOST_WIDE_INTlong, | |||
60 | HOST_WIDE_INTlong, tree); | |||
61 | extern void debug_rli (record_layout_info); | |||
62 | ||||
63 | /* Given a size SIZE that may not be a constant, return a SAVE_EXPR | |||
64 | to serve as the actual size-expression for a type or decl. */ | |||
65 | ||||
66 | tree | |||
67 | variable_size (tree size) | |||
68 | { | |||
69 | /* Obviously. */ | |||
70 | if (TREE_CONSTANT (size)((non_type_check ((size), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 70, __FUNCTION__))->base.constant_flag)) | |||
71 | return size; | |||
72 | ||||
73 | /* If the size is self-referential, we can't make a SAVE_EXPR (see | |||
74 | save_expr for the rationale). But we can do something else. */ | |||
75 | if (CONTAINS_PLACEHOLDER_P (size)((size) != 0 && ! ((non_type_check ((size), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 75, __FUNCTION__))->base.constant_flag) && contains_placeholder_p (size))) | |||
76 | return self_referential_size (size); | |||
77 | ||||
78 | /* If we are in the global binding level, we can't make a SAVE_EXPR | |||
79 | since it may end up being shared across functions, so it is up | |||
80 | to the front-end to deal with this case. */ | |||
81 | if (lang_hooks.decls.global_bindings_p ()) | |||
82 | return size; | |||
83 | ||||
84 | return save_expr (size); | |||
85 | } | |||
86 | ||||
87 | /* An array of functions used for self-referential size computation. */ | |||
88 | static GTY(()) vec<tree, va_gc> *size_functions; | |||
89 | ||||
90 | /* Return true if T is a self-referential component reference. */ | |||
91 | ||||
92 | static bool | |||
93 | self_referential_component_ref_p (tree t) | |||
94 | { | |||
95 | if (TREE_CODE (t)((enum tree_code) (t)->base.code) != COMPONENT_REF) | |||
96 | return false; | |||
97 | ||||
98 | while (REFERENCE_CLASS_P (t)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code ) (t)->base.code))] == tcc_reference)) | |||
99 | t = TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 99, __FUNCTION__))))); | |||
100 | ||||
101 | return (TREE_CODE (t)((enum tree_code) (t)->base.code) == PLACEHOLDER_EXPR); | |||
102 | } | |||
103 | ||||
104 | /* Similar to copy_tree_r but do not copy component references involving | |||
105 | PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr | |||
106 | and substituted in substitute_in_expr. */ | |||
107 | ||||
108 | static tree | |||
109 | copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data) | |||
110 | { | |||
111 | enum tree_code code = TREE_CODE (*tp)((enum tree_code) (*tp)->base.code); | |||
112 | ||||
113 | /* Stop at types, decls, constants like copy_tree_r. */ | |||
114 | if (TREE_CODE_CLASS (code)tree_code_type_tmpl <0>::tree_code_type[(int) (code)] == tcc_type | |||
115 | || TREE_CODE_CLASS (code)tree_code_type_tmpl <0>::tree_code_type[(int) (code)] == tcc_declaration | |||
116 | || TREE_CODE_CLASS (code)tree_code_type_tmpl <0>::tree_code_type[(int) (code)] == tcc_constant) | |||
117 | { | |||
118 | *walk_subtrees = 0; | |||
119 | return NULL_TREE(tree) nullptr; | |||
120 | } | |||
121 | ||||
122 | /* This is the pattern built in ada/make_aligning_type. */ | |||
123 | else if (code == ADDR_EXPR | |||
124 | && TREE_CODE (TREE_OPERAND (*tp, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check ((*tp), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 124, __FUNCTION__))))))->base.code) == PLACEHOLDER_EXPR) | |||
125 | { | |||
126 | *walk_subtrees = 0; | |||
127 | return NULL_TREE(tree) nullptr; | |||
128 | } | |||
129 | ||||
130 | /* Default case: the component reference. */ | |||
131 | else if (self_referential_component_ref_p (*tp)) | |||
132 | { | |||
133 | *walk_subtrees = 0; | |||
134 | return NULL_TREE(tree) nullptr; | |||
135 | } | |||
136 | ||||
137 | /* We're not supposed to have them in self-referential size trees | |||
138 | because we wouldn't properly control when they are evaluated. | |||
139 | However, not creating superfluous SAVE_EXPRs requires accurate | |||
140 | tracking of readonly-ness all the way down to here, which we | |||
141 | cannot always guarantee in practice. So punt in this case. */ | |||
142 | else if (code == SAVE_EXPR) | |||
143 | return error_mark_nodeglobal_trees[TI_ERROR_MARK]; | |||
144 | ||||
145 | else if (code == STATEMENT_LIST) | |||
146 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 146, __FUNCTION__)); | |||
147 | ||||
148 | return copy_tree_r (tp, walk_subtrees, data); | |||
149 | } | |||
150 | ||||
151 | /* Given a SIZE expression that is self-referential, return an equivalent | |||
152 | expression to serve as the actual size expression for a type. */ | |||
153 | ||||
154 | static tree | |||
155 | self_referential_size (tree size) | |||
156 | { | |||
157 | static unsigned HOST_WIDE_INTlong fnno = 0; | |||
158 | vec<tree> self_refs = vNULL; | |||
159 | tree param_type_list = NULLnullptr, param_decl_list = NULLnullptr; | |||
160 | tree t, ref, return_type, fntype, fnname, fndecl; | |||
161 | unsigned int i; | |||
162 | char buf[128]; | |||
163 | vec<tree, va_gc> *args = NULLnullptr; | |||
164 | ||||
165 | /* Do not factor out simple operations. */ | |||
166 | t = skip_simple_constant_arithmetic (size); | |||
167 | if (TREE_CODE (t)((enum tree_code) (t)->base.code) == CALL_EXPR || self_referential_component_ref_p (t)) | |||
168 | return size; | |||
169 | ||||
170 | /* Collect the list of self-references in the expression. */ | |||
171 | find_placeholder_in_expr (size, &self_refs); | |||
172 | gcc_assert (self_refs.length () > 0)((void)(!(self_refs.length () > 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 172, __FUNCTION__), 0 : 0)); | |||
173 | ||||
174 | /* Obtain a private copy of the expression. */ | |||
175 | t = size; | |||
176 | if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL)walk_tree_1 (&t, copy_self_referential_tree_r, nullptr, nullptr , nullptr) != NULL_TREE(tree) nullptr) | |||
177 | return size; | |||
178 | size = t; | |||
179 | ||||
180 | /* Build the parameter and argument lists in parallel; also | |||
181 | substitute the former for the latter in the expression. */ | |||
182 | vec_alloc (args, self_refs.length ()); | |||
183 | FOR_EACH_VEC_ELT (self_refs, i, ref)for (i = 0; (self_refs).iterate ((i), &(ref)); ++(i)) | |||
184 | { | |||
185 | tree subst, param_name, param_type, param_decl; | |||
186 | ||||
187 | if (DECL_P (ref)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code ) (ref)->base.code))] == tcc_declaration)) | |||
188 | { | |||
189 | /* We shouldn't have true variables here. */ | |||
190 | gcc_assert (TREE_READONLY (ref))((void)(!(((non_type_check ((ref), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 190, __FUNCTION__))->base.readonly_flag)) ? fancy_abort ( "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 190, __FUNCTION__), 0 : 0)); | |||
191 | subst = ref; | |||
192 | } | |||
193 | /* This is the pattern built in ada/make_aligning_type. */ | |||
194 | else if (TREE_CODE (ref)((enum tree_code) (ref)->base.code) == ADDR_EXPR) | |||
195 | subst = ref; | |||
196 | /* Default case: the component reference. */ | |||
197 | else | |||
198 | subst = TREE_OPERAND (ref, 1)(*((const_cast<tree*> (tree_operand_check ((ref), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 198, __FUNCTION__))))); | |||
199 | ||||
200 | sprintf (buf, "p%d", i); | |||
201 | param_name = get_identifier (buf)(__builtin_constant_p (buf) ? get_identifier_with_length ((buf ), strlen (buf)) : get_identifier (buf)); | |||
202 | param_type = TREE_TYPE (ref)((contains_struct_check ((ref), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 202, __FUNCTION__))->typed.type); | |||
203 | param_decl | |||
204 | = build_decl (input_location, PARM_DECL, param_name, param_type); | |||
205 | DECL_ARG_TYPE (param_decl)((tree_check ((param_decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 205, __FUNCTION__, (PARM_DECL)))->decl_common.initial) = param_type; | |||
206 | DECL_ARTIFICIAL (param_decl)((contains_struct_check ((param_decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 206, __FUNCTION__))->decl_common.artificial_flag) = 1; | |||
207 | TREE_READONLY (param_decl)((non_type_check ((param_decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 207, __FUNCTION__))->base.readonly_flag) = 1; | |||
208 | ||||
209 | size = substitute_in_expr (size, subst, param_decl); | |||
210 | ||||
211 | param_type_list = tree_cons (NULL_TREE(tree) nullptr, param_type, param_type_list); | |||
212 | param_decl_list = chainon (param_decl, param_decl_list); | |||
213 | args->quick_push (ref); | |||
| ||||
214 | } | |||
215 | ||||
216 | self_refs.release (); | |||
217 | ||||
218 | /* Append 'void' to indicate that the number of parameters is fixed. */ | |||
219 | param_type_list = tree_cons (NULL_TREE(tree) nullptr, void_type_nodeglobal_trees[TI_VOID_TYPE], param_type_list); | |||
220 | ||||
221 | /* The 3 lists have been created in reverse order. */ | |||
222 | param_type_list = nreverse (param_type_list); | |||
223 | param_decl_list = nreverse (param_decl_list); | |||
224 | ||||
225 | /* Build the function type. */ | |||
226 | return_type = TREE_TYPE (size)((contains_struct_check ((size), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 226, __FUNCTION__))->typed.type); | |||
227 | fntype = build_function_type (return_type, param_type_list); | |||
228 | ||||
229 | /* Build the function declaration. */ | |||
230 | sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED"%" "l" "u", fnno++); | |||
231 | fnname = get_file_function_name (buf); | |||
232 | fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype); | |||
233 | for (t = param_decl_list; t; t = DECL_CHAIN (t)(((contains_struct_check (((contains_struct_check ((t), (TS_DECL_MINIMAL ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 233, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 233, __FUNCTION__))->common.chain))) | |||
234 | DECL_CONTEXT (t)((contains_struct_check ((t), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 234, __FUNCTION__))->decl_minimal.context) = fndecl; | |||
235 | DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 235, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments ) = param_decl_list; | |||
236 | DECL_RESULT (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 236, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result ) | |||
237 | = build_decl (input_location, RESULT_DECL, 0, return_type); | |||
238 | DECL_CONTEXT (DECL_RESULT (fndecl))((contains_struct_check ((((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 238, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result )), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 238, __FUNCTION__))->decl_minimal.context) = fndecl; | |||
239 | ||||
240 | /* The function has been created by the compiler and we don't | |||
241 | want to emit debug info for it. */ | |||
242 | DECL_ARTIFICIAL (fndecl)((contains_struct_check ((fndecl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 242, __FUNCTION__))->decl_common.artificial_flag) = 1; | |||
243 | DECL_IGNORED_P (fndecl)((contains_struct_check ((fndecl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 243, __FUNCTION__))->decl_common.ignored_flag) = 1; | |||
244 | ||||
245 | /* It is supposed to be "const" and never throw. */ | |||
246 | TREE_READONLY (fndecl)((non_type_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 246, __FUNCTION__))->base.readonly_flag) = 1; | |||
247 | TREE_NOTHROW (fndecl)((fndecl)->base.nothrow_flag) = 1; | |||
248 | ||||
249 | /* We want it to be inlined when this is deemed profitable, as | |||
250 | well as discarded if every call has been integrated. */ | |||
251 | DECL_DECLARED_INLINE_P (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 251, __FUNCTION__, (FUNCTION_DECL)))->function_decl.declared_inline_flag ) = 1; | |||
252 | ||||
253 | /* It is made up of a unique return statement. */ | |||
254 | DECL_INITIAL (fndecl)((contains_struct_check ((fndecl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 254, __FUNCTION__))->decl_common.initial) = make_node (BLOCK); | |||
255 | BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl))((tree_check ((((contains_struct_check ((fndecl), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 255, __FUNCTION__))->decl_common.initial)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 255, __FUNCTION__, (BLOCK)))->block.supercontext) = fndecl; | |||
256 | t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 256, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result ), size); | |||
257 | DECL_SAVED_TREE (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 257, __FUNCTION__, (FUNCTION_DECL)))->function_decl.saved_tree ) = build1 (RETURN_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], t); | |||
258 | TREE_STATIC (fndecl)((fndecl)->base.static_flag) = 1; | |||
259 | ||||
260 | /* Put it onto the list of size functions. */ | |||
261 | vec_safe_push (size_functions, fndecl); | |||
262 | ||||
263 | /* Replace the original expression with a call to the size function. */ | |||
264 | return build_call_expr_loc_vec (UNKNOWN_LOCATION((location_t) 0), fndecl, args); | |||
265 | } | |||
266 | ||||
267 | /* Take, queue and compile all the size functions. It is essential that | |||
268 | the size functions be gimplified at the very end of the compilation | |||
269 | in order to guarantee transparent handling of self-referential sizes. | |||
270 | Otherwise the GENERIC inliner would not be able to inline them back | |||
271 | at each of their call sites, thus creating artificial non-constant | |||
272 | size expressions which would trigger nasty problems later on. */ | |||
273 | ||||
274 | void | |||
275 | finalize_size_functions (void) | |||
276 | { | |||
277 | unsigned int i; | |||
278 | tree fndecl; | |||
279 | ||||
280 | for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++) | |||
281 | { | |||
282 | allocate_struct_function (fndecl, false); | |||
283 | set_cfun (NULLnullptr); | |||
284 | dump_function (TDI_original, fndecl); | |||
285 | ||||
286 | /* As these functions are used to describe the layout of variable-length | |||
287 | structures, debug info generation needs their implementation. */ | |||
288 | debug_hooks->size_function (fndecl); | |||
289 | gimplify_function_tree (fndecl); | |||
290 | cgraph_node::finalize_function (fndecl, false); | |||
291 | } | |||
292 | ||||
293 | vec_free (size_functions); | |||
294 | } | |||
295 | ||||
296 | /* Return a machine mode of class MCLASS with SIZE bits of precision, | |||
297 | if one exists. The mode may have padding bits as well the SIZE | |||
298 | value bits. If LIMIT is nonzero, disregard modes wider than | |||
299 | MAX_FIXED_MODE_SIZE. */ | |||
300 | ||||
301 | opt_machine_mode | |||
302 | mode_for_size (poly_uint64 size, enum mode_class mclass, int limit) | |||
303 | { | |||
304 | machine_mode mode; | |||
305 | int i; | |||
306 | ||||
307 | if (limit && maybe_gt (size, (unsigned int) MAX_FIXED_MODE_SIZE)maybe_lt ((unsigned int) GET_MODE_BITSIZE (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode ::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode:: from_int) E_DImode))), size)) | |||
308 | return opt_machine_mode (); | |||
309 | ||||
310 | /* Get the first mode which has this size, in the specified class. */ | |||
311 | FOR_EACH_MODE_IN_CLASS (mode, mclass)for (mode_iterator::start (&(mode), mclass); mode_iterator ::iterate_p (&(mode)); mode_iterator::get_next (&(mode ))) | |||
312 | if (known_eq (GET_MODE_PRECISION (mode), size)(!maybe_ne (GET_MODE_PRECISION (mode), size))) | |||
313 | return mode; | |||
314 | ||||
315 | if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT) | |||
316 | for (i = 0; i < NUM_INT_N_ENTS1; i ++) | |||
317 | if (known_eq (int_n_data[i].bitsize, size)(!maybe_ne (int_n_data[i].bitsize, size)) | |||
318 | && int_n_enabled_p[i]) | |||
319 | return int_n_data[i].m; | |||
320 | ||||
321 | return opt_machine_mode (); | |||
322 | } | |||
323 | ||||
324 | /* Similar, except passed a tree node. */ | |||
325 | ||||
326 | opt_machine_mode | |||
327 | mode_for_size_tree (const_tree size, enum mode_class mclass, int limit) | |||
328 | { | |||
329 | unsigned HOST_WIDE_INTlong uhwi; | |||
330 | unsigned int ui; | |||
331 | ||||
332 | if (!tree_fits_uhwi_p (size)) | |||
333 | return opt_machine_mode (); | |||
334 | uhwi = tree_to_uhwi (size); | |||
335 | ui = uhwi; | |||
336 | if (uhwi != ui) | |||
337 | return opt_machine_mode (); | |||
338 | return mode_for_size (ui, mclass, limit); | |||
339 | } | |||
340 | ||||
341 | /* Return the narrowest mode of class MCLASS that contains at least | |||
342 | SIZE bits. Abort if no such mode exists. */ | |||
343 | ||||
344 | machine_mode | |||
345 | smallest_mode_for_size (poly_uint64 size, enum mode_class mclass) | |||
346 | { | |||
347 | machine_mode mode = VOIDmode((void) 0, E_VOIDmode); | |||
348 | int i; | |||
349 | ||||
350 | /* Get the first mode which has at least this size, in the | |||
351 | specified class. */ | |||
352 | FOR_EACH_MODE_IN_CLASS (mode, mclass)for (mode_iterator::start (&(mode), mclass); mode_iterator ::iterate_p (&(mode)); mode_iterator::get_next (&(mode ))) | |||
353 | if (known_ge (GET_MODE_PRECISION (mode), size)(!maybe_lt (GET_MODE_PRECISION (mode), size))) | |||
354 | break; | |||
355 | ||||
356 | gcc_assert (mode != VOIDmode)((void)(!(mode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 356, __FUNCTION__), 0 : 0)); | |||
357 | ||||
358 | if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT) | |||
359 | for (i = 0; i < NUM_INT_N_ENTS1; i ++) | |||
360 | if (known_ge (int_n_data[i].bitsize, size)(!maybe_lt (int_n_data[i].bitsize, size)) | |||
361 | && known_lt (int_n_data[i].bitsize, GET_MODE_PRECISION (mode))(!maybe_le (GET_MODE_PRECISION (mode), int_n_data[i].bitsize) ) | |||
362 | && int_n_enabled_p[i]) | |||
363 | mode = int_n_data[i].m; | |||
364 | ||||
365 | return mode; | |||
366 | } | |||
367 | ||||
368 | /* Return an integer mode of exactly the same size as MODE, if one exists. */ | |||
369 | ||||
370 | opt_scalar_int_mode | |||
371 | int_mode_for_mode (machine_mode mode) | |||
372 | { | |||
373 | switch (GET_MODE_CLASS (mode)((enum mode_class) mode_class[mode])) | |||
374 | { | |||
375 | case MODE_INT: | |||
376 | case MODE_PARTIAL_INT: | |||
377 | return as_a <scalar_int_mode> (mode); | |||
378 | ||||
379 | case MODE_COMPLEX_INT: | |||
380 | case MODE_COMPLEX_FLOAT: | |||
381 | case MODE_FLOAT: | |||
382 | case MODE_DECIMAL_FLOAT: | |||
383 | case MODE_FRACT: | |||
384 | case MODE_ACCUM: | |||
385 | case MODE_UFRACT: | |||
386 | case MODE_UACCUM: | |||
387 | case MODE_VECTOR_BOOL: | |||
388 | case MODE_VECTOR_INT: | |||
389 | case MODE_VECTOR_FLOAT: | |||
390 | case MODE_VECTOR_FRACT: | |||
391 | case MODE_VECTOR_ACCUM: | |||
392 | case MODE_VECTOR_UFRACT: | |||
393 | case MODE_VECTOR_UACCUM: | |||
394 | return int_mode_for_size (GET_MODE_BITSIZE (mode), 0); | |||
395 | ||||
396 | case MODE_OPAQUE: | |||
397 | return opt_scalar_int_mode (); | |||
398 | ||||
399 | case MODE_RANDOM: | |||
400 | if (mode == BLKmode((void) 0, E_BLKmode)) | |||
401 | return opt_scalar_int_mode (); | |||
402 | ||||
403 | /* fall through */ | |||
404 | ||||
405 | case MODE_CC: | |||
406 | default: | |||
407 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 407, __FUNCTION__)); | |||
408 | } | |||
409 | } | |||
410 | ||||
411 | /* Find a mode that can be used for efficient bitwise operations on MODE, | |||
412 | if one exists. */ | |||
413 | ||||
414 | opt_machine_mode | |||
415 | bitwise_mode_for_mode (machine_mode mode) | |||
416 | { | |||
417 | /* Quick exit if we already have a suitable mode. */ | |||
418 | scalar_int_mode int_mode; | |||
419 | if (is_a <scalar_int_mode> (mode, &int_mode) | |||
420 | && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZEGET_MODE_BITSIZE (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode::from_int ) E_TImode)) : (scalar_int_mode ((scalar_int_mode::from_int) E_DImode )))) | |||
421 | return int_mode; | |||
422 | ||||
423 | /* Reuse the sanity checks from int_mode_for_mode. */ | |||
424 | gcc_checking_assert ((int_mode_for_mode (mode), true))((void)(!((int_mode_for_mode (mode), true)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 424, __FUNCTION__), 0 : 0)); | |||
425 | ||||
426 | poly_int64 bitsize = GET_MODE_BITSIZE (mode); | |||
427 | ||||
428 | /* Try to replace complex modes with complex modes. In general we | |||
429 | expect both components to be processed independently, so we only | |||
430 | care whether there is a register for the inner mode. */ | |||
431 | if (COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || ( (enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)) | |||
432 | { | |||
433 | machine_mode trial = mode; | |||
434 | if ((GET_MODE_CLASS (trial)((enum mode_class) mode_class[trial]) == MODE_COMPLEX_INT | |||
435 | || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial)) | |||
436 | && have_regs_of_mode(this_target_regs->x_have_regs_of_mode)[GET_MODE_INNER (trial)(mode_to_inner (trial))]) | |||
437 | return trial; | |||
438 | } | |||
439 | ||||
440 | /* Try to replace vector modes with vector modes. Also try using vector | |||
441 | modes if an integer mode would be too big. */ | |||
442 | if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) | |||
443 | || maybe_gt (bitsize, MAX_FIXED_MODE_SIZE)maybe_lt (GET_MODE_BITSIZE (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode ::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode:: from_int) E_DImode))), bitsize)) | |||
444 | { | |||
445 | machine_mode trial = mode; | |||
446 | if ((GET_MODE_CLASS (trial)((enum mode_class) mode_class[trial]) == MODE_VECTOR_INT | |||
447 | || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial)) | |||
448 | && have_regs_of_mode(this_target_regs->x_have_regs_of_mode)[trial] | |||
449 | && targetm.vector_mode_supported_p (trial)) | |||
450 | return trial; | |||
451 | } | |||
452 | ||||
453 | /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE. */ | |||
454 | return mode_for_size (bitsize, MODE_INT, true); | |||
455 | } | |||
456 | ||||
457 | /* Find a type that can be used for efficient bitwise operations on MODE. | |||
458 | Return null if no such mode exists. */ | |||
459 | ||||
460 | tree | |||
461 | bitwise_type_for_mode (machine_mode mode) | |||
462 | { | |||
463 | if (!bitwise_mode_for_mode (mode).exists (&mode)) | |||
464 | return NULL_TREE(tree) nullptr; | |||
465 | ||||
466 | unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode)((unsigned short) (mode_to_unit_size (mode) * (8))); | |||
467 | tree inner_type = build_nonstandard_integer_type (inner_size, true); | |||
468 | ||||
469 | if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)) | |||
470 | return build_vector_type_for_mode (inner_type, mode); | |||
471 | ||||
472 | if (COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || ( (enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)) | |||
473 | return build_complex_type (inner_type); | |||
474 | ||||
475 | gcc_checking_assert (GET_MODE_INNER (mode) == mode)((void)(!((mode_to_inner (mode)) == mode) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 475, __FUNCTION__), 0 : 0)); | |||
476 | return inner_type; | |||
477 | } | |||
478 | ||||
479 | /* Find a mode that is suitable for representing a vector with NUNITS | |||
480 | elements of mode INNERMODE, if one exists. The returned mode can be | |||
481 | either an integer mode or a vector mode. */ | |||
482 | ||||
483 | opt_machine_mode | |||
484 | mode_for_vector (scalar_mode innermode, poly_uint64 nunits) | |||
485 | { | |||
486 | machine_mode mode; | |||
487 | ||||
488 | /* First, look for a supported vector type. */ | |||
489 | if (SCALAR_FLOAT_MODE_P (innermode)(((enum mode_class) mode_class[innermode]) == MODE_FLOAT || ( (enum mode_class) mode_class[innermode]) == MODE_DECIMAL_FLOAT )) | |||
490 | mode = MIN_MODE_VECTOR_FLOAT; | |||
491 | else if (SCALAR_FRACT_MODE_P (innermode)(((enum mode_class) mode_class[innermode]) == MODE_FRACT)) | |||
492 | mode = MIN_MODE_VECTOR_FRACT; | |||
493 | else if (SCALAR_UFRACT_MODE_P (innermode)(((enum mode_class) mode_class[innermode]) == MODE_UFRACT)) | |||
494 | mode = MIN_MODE_VECTOR_UFRACT; | |||
495 | else if (SCALAR_ACCUM_MODE_P (innermode)(((enum mode_class) mode_class[innermode]) == MODE_ACCUM)) | |||
496 | mode = MIN_MODE_VECTOR_ACCUM; | |||
497 | else if (SCALAR_UACCUM_MODE_P (innermode)(((enum mode_class) mode_class[innermode]) == MODE_UACCUM)) | |||
498 | mode = MIN_MODE_VECTOR_UACCUM; | |||
499 | else | |||
500 | mode = MIN_MODE_VECTOR_INT; | |||
501 | ||||
502 | /* Do not check vector_mode_supported_p here. We'll do that | |||
503 | later in vector_type_mode. */ | |||
504 | FOR_EACH_MODE_FROM (mode, mode)for ((mode) = (mode); mode_iterator::iterate_p (&(mode)); mode_iterator::get_next (&(mode))) | |||
505 | if (known_eq (GET_MODE_NUNITS (mode), nunits)(!maybe_ne (GET_MODE_NUNITS (mode), nunits)) | |||
506 | && GET_MODE_INNER (mode)(mode_to_inner (mode)) == innermode) | |||
507 | return mode; | |||
508 | ||||
509 | /* For integers, try mapping it to a same-sized scalar mode. */ | |||
510 | if (GET_MODE_CLASS (innermode)((enum mode_class) mode_class[innermode]) == MODE_INT) | |||
511 | { | |||
512 | poly_uint64 nbits = nunits * GET_MODE_BITSIZE (innermode); | |||
513 | if (int_mode_for_size (nbits, 0).exists (&mode) | |||
514 | && have_regs_of_mode(this_target_regs->x_have_regs_of_mode)[mode]) | |||
515 | return mode; | |||
516 | } | |||
517 | ||||
518 | return opt_machine_mode (); | |||
519 | } | |||
520 | ||||
521 | /* If a piece of code is using vector mode VECTOR_MODE and also wants | |||
522 | to operate on elements of mode ELEMENT_MODE, return the vector mode | |||
523 | it should use for those elements. If NUNITS is nonzero, ensure that | |||
524 | the mode has exactly NUNITS elements, otherwise pick whichever vector | |||
525 | size pairs the most naturally with VECTOR_MODE; this may mean choosing | |||
526 | a mode with a different size and/or number of elements, depending on | |||
527 | what the target prefers. Return an empty opt_machine_mode if there | |||
528 | is no supported vector mode with the required properties. | |||
529 | ||||
530 | Unlike mode_for_vector. any returned mode is guaranteed to satisfy | |||
531 | both VECTOR_MODE_P and targetm.vector_mode_supported_p. */ | |||
532 | ||||
533 | opt_machine_mode | |||
534 | related_vector_mode (machine_mode vector_mode, scalar_mode element_mode, | |||
535 | poly_uint64 nunits) | |||
536 | { | |||
/* NOTE(review): this is preprocessed output — gcc_assert(VECTOR_MODE_P (...))
   expands inline to a check that VECTOR_MODE's mode class is one of the
   MODE_VECTOR_* classes, calling fancy_abort on failure.  */
537 | gcc_assert (VECTOR_MODE_P (vector_mode))((void)(!((((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_BOOL || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_UACCUM )) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 537, __FUNCTION__), 0 : 0)); | |||
/* The actual mode choice is entirely delegated to the target hook; this
   wrapper only validates its argument.  */
538 | return targetm.vectorize.related_mode (vector_mode, element_mode, nunits); | |||
539 | } | |||
540 | ||||
541 | /* If a piece of code is using vector mode VECTOR_MODE and also wants | |||
542 | to operate on integer vectors with the same element size and number | |||
543 | of elements, return the vector mode it should use. Return an empty | |||
544 | opt_machine_mode if there is no supported vector mode with the | |||
545 | required properties. | |||
546 | ||||
547 | Unlike mode_for_vector, any returned mode is guaranteed to satisfy | |||
548 | both VECTOR_MODE_P and targetm.vector_mode_supported_p. */ | |||
549 | ||||
550 | opt_machine_mode | |||
551 | related_int_vector_mode (machine_mode vector_mode) | |||
552 | { | |||
/* Expanded gcc_assert: VECTOR_MODE must belong to one of the MODE_VECTOR_*
   classes, otherwise fancy_abort is called.  */
553 | gcc_assert (VECTOR_MODE_P (vector_mode))((void)(!((((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_BOOL || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[vector_mode]) == MODE_VECTOR_UACCUM )) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 553, __FUNCTION__), 0 : 0)); | |||
554 | scalar_int_mode int_mode; | |||
/* Map the vector's element mode to an integer scalar mode of the same size;
   if that exists, ask related_vector_mode for an integer vector with the
   same number of elements.  */
555 | if (int_mode_for_mode (GET_MODE_INNER (vector_mode)(mode_to_inner (vector_mode))).exists (&int_mode)) | |||
556 | return related_vector_mode (vector_mode, int_mode, | |||
557 | GET_MODE_NUNITS (vector_mode)); | |||
/* No integer mode of the element's size — report failure with an empty
   opt_machine_mode.  */
558 | return opt_machine_mode (); | |||
559 | } | |||
560 | ||||
561 | /* Return the alignment of MODE. This will be bounded by 1 and | |||
562 | BIGGEST_ALIGNMENT. */ | |||
563 | ||||
564 | unsigned int | |||
565 | get_mode_alignment (machine_mode mode) | |||
566 | { | |||
/* Clamp mode_base_align[mode] (in bytes, scaled to bits) into
   [1, BIGGEST_ALIGNMENT].  The expansion below shows the i386 target's
   BIGGEST_ALIGNMENT: 32 when a target flag forces it, else 512/256/128 bits
   depending on AVX512/AVX ISA flags (this is preprocessed output, so the
   target macros appear inline).  */
567 |   return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT))(((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128)))) < (((1) > (mode_base_align [mode]*(8)) ? (1) : (mode_base_align[mode]*(8)))) ? ((((global_options .x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options .x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options .x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)) )) : (((1) > (mode_base_align [mode]*(8)) ? (1) : (mode_base_align [mode]*(8))))); | |||
568 | } | |||
569 | ||||
570 | /* Return the natural mode of an array, given that it is SIZE bytes in | |||
571 | total and has elements of type ELEM_TYPE. */ | |||
572 | ||||
573 | static machine_mode | |||
574 | mode_for_array (tree elem_type, tree size) | |||
575 | { | |||
576 | tree elem_size; | |||
577 | poly_uint64 int_size, int_elem_size; | |||
578 | unsigned HOST_WIDE_INTlong num_elems; | |||
/* limit_p: whether mode_for_size_tree should restrict itself to modes no
   wider than the largest integer mode (cleared below if the target says a
   dedicated array mode is supported).  */
579 | bool limit_p; | |||
580 | ||||
581 | /* One-element arrays get the component type's mode. */ | |||
582 | elem_size = TYPE_SIZE (elem_type)((tree_class_check ((elem_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 582, __FUNCTION__))->type_common.size); | |||
583 | if (simple_cst_equal (size, elem_size)) | |||
584 | return TYPE_MODE (elem_type)((((enum tree_code) ((tree_class_check ((elem_type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 584, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (elem_type) : (elem_type)->type_common.mode); | |||
585 | ||||
586 | limit_p = true; | |||
/* Only consult the target hooks when both sizes are known poly-ints, the
   element size is provably nonzero (guards the division), and the total is
   an exact constant multiple of the element size.  */
587 | if (poly_int_tree_p (size, &int_size) | |||
588 | && poly_int_tree_p (elem_size, &int_elem_size) | |||
589 | && maybe_ne (int_elem_size, 0U) | |||
590 | && constant_multiple_p (int_size, int_elem_size, &num_elems)) | |||
591 | { | |||
592 | machine_mode elem_mode = TYPE_MODE (elem_type)((((enum tree_code) ((tree_class_check ((elem_type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 592, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (elem_type) : (elem_type)->type_common.mode); | |||
593 | machine_mode mode; | |||
/* First preference: a target-provided array mode for this element
   mode/count.  */
594 | if (targetm.array_mode (elem_mode, num_elems).exists (&mode)) | |||
595 | return mode; | |||
/* Otherwise, if the target supports such arrays, allow oversize integer
   modes in the fallback lookup below.  */
596 | if (targetm.array_mode_supported_p (elem_mode, num_elems)) | |||
597 | limit_p = false; | |||
598 | } | |||
/* Fallback: an integer mode matching the total size, or BLKmode if none
   exists.  */
599 | return mode_for_size_tree (size, MODE_INT, limit_p).else_blk (); | |||
600 | } | |||
601 | ||||
602 | /* Subroutine of layout_decl: Force alignment required for the data type. | |||
603 | But if the decl itself wants greater alignment, don't override that. */ | |||
604 | ||||
605 | static inline void | |||
606 | do_type_align (tree type, tree decl) | |||
607 | { | |||
/* Raise DECL's alignment to the type's alignment if the type requires more;
   an already-larger DECL_ALIGN is left alone (see function comment above).
   The inline expansions decode the log2-encoded align fields
   (ffs_hwi / 1 << (align - 1)) — this is preprocessed output.  */
608 | if (TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 608, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 608, __FUNCTION__))->type_common.align) - 1) : 0) > DECL_ALIGN (decl)(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 608, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 608, __FUNCTION__))->decl_common.align) - 1) : 0)) | |||
609 | { | |||
610 | SET_DECL_ALIGN (decl, TYPE_ALIGN (type))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 610, __FUNCTION__))->decl_common.align) = ffs_hwi ((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 610, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 610, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
/* For fields, also inherit the type's user-specified-alignment flag.  */
611 | if (TREE_CODE (decl)((enum tree_code) (decl)->base.code) == FIELD_DECL) | |||
612 | DECL_USER_ALIGN (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 612, __FUNCTION__))->base.u.bits.user_align) = TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 612, __FUNCTION__))->base.u.bits.user_align); | |||
613 | } | |||
/* Likewise propagate the larger warn_if_not_align threshold from the type
   to the decl (same log2 encoding as the align fields).  */
614 | if (TYPE_WARN_IF_NOT_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 614, __FUNCTION__))->type_common.warn_if_not_align ? ((unsigned )1) << ((type)->type_common.warn_if_not_align - 1) : 0) > DECL_WARN_IF_NOT_ALIGN (decl)(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 614, __FUNCTION__))->decl_common.warn_if_not_align) ? (( unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 614, __FUNCTION__))->decl_common.warn_if_not_align) - 1) : 0)) | |||
615 | SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 615, __FUNCTION__))->decl_common.warn_if_not_align) = ffs_hwi (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 615, __FUNCTION__))->type_common.warn_if_not_align ? ((unsigned )1) << ((type)->type_common.warn_if_not_align - 1) : 0))); | |||
616 | } | |||
617 | ||||
618 | /* Set the size, mode and alignment of a ..._DECL node. | |||
619 | TYPE_DECL does need this for C++. | |||
620 | Note that LABEL_DECL and CONST_DECL nodes do not need this, | |||
621 | and FUNCTION_DECL nodes have them set up in a special (and simple) way. | |||
622 | Don't call layout_decl for them. | |||
623 | ||||
624 | KNOWN_ALIGN is the amount of alignment we can assume this | |||
625 | decl has with no special effort. It is relevant only for FIELD_DECLs | |||
626 | and depends on the previous fields. | |||
627 | All that matters about KNOWN_ALIGN is which powers of 2 divide it. | |||
628 | If KNOWN_ALIGN is 0, it means, "as much alignment as you like": | |||
629 | the record will be aligned to suit. */ | |||
630 | ||||
631 | void | |||
632 | layout_decl (tree decl, unsigned int known_align) | |||
633 | { | |||
634 | tree type = TREE_TYPE (decl)((contains_struct_check ((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 634, __FUNCTION__))->typed.type); | |||
635 | enum tree_code code = TREE_CODE (decl)((enum tree_code) (decl)->base.code); | |||
636 | rtx rtl = NULL_RTX(rtx) 0; | |||
637 | location_t loc = DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 637, __FUNCTION__))->decl_minimal.locus); | |||
638 | ||||
639 | if (code == CONST_DECL) | |||
640 | return; | |||
641 | ||||
642 | gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL((void)(!(code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL || code == TYPE_DECL || code == FIELD_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 643, __FUNCTION__), 0 : 0)) | |||
643 | || code == TYPE_DECL || code == FIELD_DECL)((void)(!(code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL || code == TYPE_DECL || code == FIELD_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 643, __FUNCTION__), 0 : 0)); | |||
644 | ||||
645 | rtl = DECL_RTL_IF_SET (decl)((((tree_contains_struct[(((enum tree_code) (decl)->base.code ))][(TS_DECL_WRTL)])) && (contains_struct_check ((decl ), (TS_DECL_WRTL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 645, __FUNCTION__))->decl_with_rtl.rtl != nullptr) ? ((contains_struct_check ((decl), (TS_DECL_WRTL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 645, __FUNCTION__))->decl_with_rtl.rtl ? (decl)->decl_with_rtl .rtl : (make_decl_rtl (decl), (decl)->decl_with_rtl.rtl)) : nullptr); | |||
646 | ||||
647 | if (type == error_mark_nodeglobal_trees[TI_ERROR_MARK]) | |||
648 | type = void_type_nodeglobal_trees[TI_VOID_TYPE]; | |||
649 | ||||
650 | /* Usually the size and mode come from the data type without change, | |||
651 | however, the front-end may set the explicit width of the field, so its | |||
652 | size may not be the same as the size of its type. This happens with | |||
653 | bitfields, of course (an `int' bitfield may be only 2 bits, say), but it | |||
654 | also happens with other fields. For example, the C++ front-end creates | |||
655 | zero-sized fields corresponding to empty base classes, and depends on | |||
656 | layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the | |||
657 | size in bytes from the size in bits. If we have already set the mode, | |||
658 | don't set it again since we can be called twice for FIELD_DECLs. */ | |||
659 | ||||
660 | DECL_UNSIGNED (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 660, __FUNCTION__))->base.u.bits.unsigned_flag) = TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 660, __FUNCTION__))->base.u.bits.unsigned_flag); | |||
661 | if (DECL_MODE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 661, __FUNCTION__))->decl_common.mode) == VOIDmode((void) 0, E_VOIDmode)) | |||
662 | SET_DECL_MODE (decl, TYPE_MODE (type))((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 662, __FUNCTION__))->decl_common.mode = (((((enum tree_code ) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 662, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode))); | |||
663 | ||||
664 | if (DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 664, __FUNCTION__))->decl_common.size) == 0) | |||
665 | { | |||
666 | DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 666, __FUNCTION__))->decl_common.size) = TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 666, __FUNCTION__))->type_common.size); | |||
667 | DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 667, __FUNCTION__))->decl_common.size_unit) = TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 667, __FUNCTION__))->type_common.size_unit); | |||
668 | } | |||
669 | else if (DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 669, __FUNCTION__))->decl_common.size_unit) == 0) | |||
670 | DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 670, __FUNCTION__))->decl_common.size_unit) | |||
671 | = fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], | |||
672 | size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 672, __FUNCTION__))->decl_common.size), | |||
673 | bitsize_unit_nodeglobal_trees[TI_BITSIZE_UNIT])); | |||
674 | ||||
675 | if (code != FIELD_DECL) | |||
676 | /* For non-fields, update the alignment from the type. */ | |||
677 | do_type_align (type, decl); | |||
678 | else | |||
679 | /* For fields, it's a bit more complicated... */ | |||
680 | { | |||
681 | bool old_user_align = DECL_USER_ALIGN (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 681, __FUNCTION__))->base.u.bits.user_align); | |||
682 | bool zero_bitfield = false; | |||
683 | bool packed_p = DECL_PACKED (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 683, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag ); | |||
684 | unsigned int mfa; | |||
685 | ||||
686 | if (DECL_BIT_FIELD (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 686, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1 )) | |||
687 | { | |||
688 | DECL_BIT_FIELD_TYPE (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 688, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) = type; | |||
689 | ||||
690 | /* A zero-length bit-field affects the alignment of the next | |||
691 | field. In essence such bit-fields are not influenced by | |||
692 | any packing due to #pragma pack or attribute packed. */ | |||
693 | if (integer_zerop (DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 693, __FUNCTION__))->decl_common.size)) | |||
694 | && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 694, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context))) | |||
695 | { | |||
696 | zero_bitfield = true; | |||
697 | packed_p = false; | |||
698 | if (PCC_BITFIELD_TYPE_MATTERS1) | |||
699 | do_type_align (type, decl); | |||
700 | else | |||
701 | { | |||
702 | #ifdef EMPTY_FIELD_BOUNDARY | |||
703 | if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl)(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 703, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 703, __FUNCTION__))->decl_common.align) - 1) : 0)) | |||
704 | { | |||
705 | SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY)(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 705, __FUNCTION__))->decl_common.align) = ffs_hwi (EMPTY_FIELD_BOUNDARY )); | |||
706 | DECL_USER_ALIGN (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 706, __FUNCTION__))->base.u.bits.user_align) = 0; | |||
707 | } | |||
708 | #endif | |||
709 | } | |||
710 | } | |||
711 | ||||
712 | /* See if we can use an ordinary integer mode for a bit-field. | |||
713 | Conditions are: a fixed size that is correct for another mode, | |||
714 | occupying a complete byte or bytes on proper boundary. */ | |||
715 | if (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 715, __FUNCTION__))->type_common.size) != 0 | |||
716 | && TREE_CODE (TYPE_SIZE (type))((enum tree_code) (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 716, __FUNCTION__))->type_common.size))->base.code) == INTEGER_CST | |||
717 | && GET_MODE_CLASS (TYPE_MODE (type))((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 717, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode)]) == MODE_INT) | |||
718 | { | |||
719 | machine_mode xmode; | |||
720 | if (mode_for_size_tree (DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 720, __FUNCTION__))->decl_common.size), | |||
721 | MODE_INT, 1).exists (&xmode)) | |||
722 | { | |||
723 | unsigned int xalign = GET_MODE_ALIGNMENT (xmode)get_mode_alignment (xmode); | |||
724 | if (!(xalign > BITS_PER_UNIT(8) && DECL_PACKED (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 724, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
725 | && (known_align == 0 || known_align >= xalign)) | |||
726 | { | |||
727 | SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 727, __FUNCTION__))->decl_common.align) = ffs_hwi (((xalign ) > ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 727, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 727, __FUNCTION__))->decl_common.align) - 1) : 0)) ? (xalign ) : ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 727, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 727, __FUNCTION__))->decl_common.align) - 1) : 0))))); | |||
728 | SET_DECL_MODE (decl, xmode)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 728, __FUNCTION__))->decl_common.mode = (xmode)); | |||
729 | DECL_BIT_FIELD (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 729, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1 ) = 0; | |||
730 | } | |||
731 | } | |||
732 | } | |||
733 | ||||
734 | /* Turn off DECL_BIT_FIELD if we won't need it set. */ | |||
735 | if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 735, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) == BLKmode((void) 0, E_BLKmode) && DECL_MODE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 735, __FUNCTION__))->decl_common.mode) == BLKmode((void) 0, E_BLKmode) | |||
736 | && known_align >= TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 736, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 736, __FUNCTION__))->type_common.align) - 1) : 0) | |||
737 | && DECL_ALIGN (decl)(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 737, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 737, __FUNCTION__))->decl_common.align) - 1) : 0) >= TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 737, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 737, __FUNCTION__))->type_common.align) - 1) : 0)) | |||
738 | DECL_BIT_FIELD (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 738, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1 ) = 0; | |||
739 | } | |||
740 | else if (packed_p && DECL_USER_ALIGN (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 740, __FUNCTION__))->base.u.bits.user_align)) | |||
741 | /* Don't touch DECL_ALIGN. For other packed fields, go ahead and | |||
742 | round up; we'll reduce it again below. We want packing to | |||
743 | supersede USER_ALIGN inherited from the type, but defer to | |||
744 | alignment explicitly specified on the field decl. */; | |||
745 | else | |||
746 | do_type_align (type, decl); | |||
747 | ||||
748 | /* If the field is packed and not explicitly aligned, give it the | |||
749 | minimum alignment. Note that do_type_align may set | |||
750 | DECL_USER_ALIGN, so we need to check old_user_align instead. */ | |||
751 | if (packed_p | |||
752 | && !old_user_align) | |||
753 | SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 753, __FUNCTION__))->decl_common.align) = ffs_hwi (((((( contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 753, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 753, __FUNCTION__))->decl_common.align) - 1) : 0)) < ( (8)) ? ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 753, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 753, __FUNCTION__))->decl_common.align) - 1) : 0)) : ((8 ))))); | |||
754 | ||||
755 | if (! packed_p && ! DECL_USER_ALIGN (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 755, __FUNCTION__))->base.u.bits.user_align)) | |||
756 | { | |||
757 | /* Some targets (i.e. i386, VMS) limit struct field alignment | |||
758 | to a lower boundary than alignment of variables unless | |||
759 | it was overridden by attribute aligned. */ | |||
760 | #ifdef BIGGEST_FIELD_ALIGNMENT | |||
761 | SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 762, __FUNCTION__))->decl_common.align) = ffs_hwi (((((( contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) - 1) : 0)) < ( (unsigned) BIGGEST_FIELD_ALIGNMENT) ? ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) - 1) : 0)) : ((unsigned ) BIGGEST_FIELD_ALIGNMENT)))) | |||
762 | (unsigned) BIGGEST_FIELD_ALIGNMENT))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 762, __FUNCTION__))->decl_common.align) = ffs_hwi (((((( contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) - 1) : 0)) < ( (unsigned) BIGGEST_FIELD_ALIGNMENT) ? ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 761, __FUNCTION__))->decl_common.align) - 1) : 0)) : ((unsigned ) BIGGEST_FIELD_ALIGNMENT)))); | |||
763 | #endif | |||
764 | #ifdef ADJUST_FIELD_ALIGN | |||
765 | SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 766, __FUNCTION__))->decl_common.align) = ffs_hwi (x86_field_alignment ((((contains_struct_check ((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 765, __FUNCTION__))->typed.type)), ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 766, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 766, __FUNCTION__))->decl_common.align) - 1) : 0))))) | |||
766 | DECL_ALIGN (decl)))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 766, __FUNCTION__))->decl_common.align) = ffs_hwi (x86_field_alignment ((((contains_struct_check ((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 765, __FUNCTION__))->typed.type)), ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 766, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 766, __FUNCTION__))->decl_common.align) - 1) : 0))))); | |||
767 | #endif | |||
768 | } | |||
769 | ||||
770 | if (zero_bitfield) | |||
771 | mfa = initial_max_fld_alignglobal_options.x_initial_max_fld_align * BITS_PER_UNIT(8); | |||
772 | else | |||
773 | mfa = maximum_field_alignment; | |||
774 | /* Should this be controlled by DECL_USER_ALIGN, too? */ | |||
775 | if (mfa != 0) | |||
776 | SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa))(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 776, __FUNCTION__))->decl_common.align) = ffs_hwi (((((( contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 776, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 776, __FUNCTION__))->decl_common.align) - 1) : 0)) < ( mfa) ? ((((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 776, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 776, __FUNCTION__))->decl_common.align) - 1) : 0)) : (mfa )))); | |||
777 | } | |||
778 | ||||
779 | /* Evaluate nonconstant size only once, either now or as soon as safe. */ | |||
780 | if (DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 780, __FUNCTION__))->decl_common.size) != 0 && TREE_CODE (DECL_SIZE (decl))((enum tree_code) (((contains_struct_check ((decl), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 780, __FUNCTION__))->decl_common.size))->base.code) != INTEGER_CST) | |||
781 | DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 781, __FUNCTION__))->decl_common.size) = variable_size (DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 781, __FUNCTION__))->decl_common.size)); | |||
782 | if (DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 782, __FUNCTION__))->decl_common.size_unit) != 0 | |||
783 | && TREE_CODE (DECL_SIZE_UNIT (decl))((enum tree_code) (((contains_struct_check ((decl), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 783, __FUNCTION__))->decl_common.size_unit))->base.code ) != INTEGER_CST) | |||
784 | DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 784, __FUNCTION__))->decl_common.size_unit) = variable_size (DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 784, __FUNCTION__))->decl_common.size_unit)); | |||
785 | ||||
786 | /* If requested, warn about definitions of large data objects. */ | |||
787 | if ((code == PARM_DECL || (code == VAR_DECL && !DECL_NONLOCAL_FRAME (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 787, __FUNCTION__, (VAR_DECL)))->base.default_def_flag))) | |||
788 | && !DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 788, __FUNCTION__))->decl_common.decl_flag_1)) | |||
789 | { | |||
790 | tree size = DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 790, __FUNCTION__))->decl_common.size_unit); | |||
791 | ||||
792 | if (size != 0 && TREE_CODE (size)((enum tree_code) (size)->base.code) == INTEGER_CST) | |||
793 | { | |||
794 | /* -Wlarger-than= argument of HOST_WIDE_INT_MAX is treated | |||
795 | as if PTRDIFF_MAX had been specified, with the value | |||
796 | being that on the target rather than the host. */ | |||
797 | unsigned HOST_WIDE_INTlong max_size = warn_larger_than_sizeglobal_options.x_warn_larger_than_size; | |||
798 | if (max_size == HOST_WIDE_INT_MAX(~((long) (1UL << (64 - 1))))) | |||
799 | max_size = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node)((tree_check5 ((global_trees[TI_PTRDIFF_TYPE]), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 799, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval )); | |||
800 | ||||
801 | if (compare_tree_int (size, max_size) > 0) | |||
802 | warning (OPT_Wlarger_than_, "size of %q+D %E bytes exceeds " | |||
803 | "maximum object size %wu", | |||
804 | decl, size, max_size); | |||
805 | } | |||
806 | } | |||
807 | ||||
808 | /* If the RTL was already set, update its mode and mem attributes. */ | |||
809 | if (rtl) | |||
810 | { | |||
811 | PUT_MODE (rtl, DECL_MODE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 811, __FUNCTION__))->decl_common.mode)); | |||
812 | SET_DECL_RTL (decl, 0)set_decl_rtl (decl, 0); | |||
813 | if (MEM_P (rtl)(((enum rtx_code) (rtl)->code) == MEM)) | |||
814 | set_mem_attributes (rtl, decl, 1); | |||
815 | SET_DECL_RTL (decl, rtl)set_decl_rtl (decl, rtl); | |||
816 | } | |||
817 | } | |||
818 | ||||
819 | /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the | |||
820 | results of a previous call to layout_decl and calls it again. */ | |||
821 | ||||
822 | void | |||
823 | relayout_decl (tree decl) | |||
824 | { | |||
825 | DECL_SIZE (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 825, __FUNCTION__))->decl_common.size) = DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 825, __FUNCTION__))->decl_common.size_unit) = 0; | |||
826 | SET_DECL_MODE (decl, VOIDmode)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 826, __FUNCTION__))->decl_common.mode = (((void) 0, E_VOIDmode ))); | |||
827 | if (!DECL_USER_ALIGN (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 827, __FUNCTION__))->base.u.bits.user_align)) | |||
828 | SET_DECL_ALIGN (decl, 0)(((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 828, __FUNCTION__))->decl_common.align) = ffs_hwi (0)); | |||
829 | if (DECL_RTL_SET_P (decl)(((tree_contains_struct[(((enum tree_code) (decl)->base.code ))][(TS_DECL_WRTL)])) && (contains_struct_check ((decl ), (TS_DECL_WRTL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 829, __FUNCTION__))->decl_with_rtl.rtl != nullptr)) | |||
830 | SET_DECL_RTL (decl, 0)set_decl_rtl (decl, 0); | |||
831 | ||||
832 | layout_decl (decl, 0); | |||
833 | } | |||
834 | ||||
835 | /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or | |||
836 | QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which | |||
837 | is to be passed to all other layout functions for this record. It is the | |||
838 | responsibility of the caller to call `free' for the storage returned. | |||
839 | Note that garbage collection is not permitted until we finish laying | |||
840 | out the record. */ | |||
841 | ||||
842 | record_layout_info | |||
843 | start_record_layout (tree t) | |||
844 | { | |||
845 | record_layout_info rli = XNEW (struct record_layout_info_s)((struct record_layout_info_s *) xmalloc (sizeof (struct record_layout_info_s ))); | |||
846 | ||||
847 | rli->t = t; | |||
848 | ||||
849 | /* If the type has a minimum specified alignment (via an attribute | |||
850 | declaration, for example) use it -- otherwise, start with a | |||
851 | one-byte alignment. */ | |||
852 | rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t))(((8)) > ((((tree_class_check ((t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 852, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 852, __FUNCTION__))->type_common.align) - 1) : 0)) ? ((8 )) : ((((tree_class_check ((t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 852, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 852, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
853 | rli->unpacked_align = rli->record_align; | |||
854 | rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT)((rli->record_align) > ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) ? (rli->record_align ) : ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128))))); | |||
855 | ||||
856 | #ifdef STRUCTURE_SIZE_BOUNDARY | |||
857 | /* Packed structures don't need to have minimum size. */ | |||
858 | if (! TYPE_PACKED (t)((tree_class_check ((t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 858, __FUNCTION__))->base.u.bits.packed_flag)) | |||
859 | { | |||
860 | unsigned tmp; | |||
861 | ||||
862 | /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */ | |||
863 | tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY; | |||
864 | if (maximum_field_alignment != 0) | |||
865 | tmp = MIN (tmp, maximum_field_alignment)((tmp) < (maximum_field_alignment) ? (tmp) : (maximum_field_alignment )); | |||
866 | rli->record_align = MAX (rli->record_align, tmp)((rli->record_align) > (tmp) ? (rli->record_align) : (tmp)); | |||
867 | } | |||
868 | #endif | |||
869 | ||||
870 | rli->offset = size_zero_nodeglobal_trees[TI_SIZE_ZERO]; | |||
871 | rli->bitpos = bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]; | |||
872 | rli->prev_field = 0; | |||
873 | rli->pending_statics = 0; | |||
874 | rli->packed_maybe_necessary = 0; | |||
875 | rli->remaining_in_alignment = 0; | |||
876 | ||||
877 | return rli; | |||
878 | } | |||
879 | ||||
880 | /* Fold sizetype value X to bitsizetype, given that X represents a type | |||
881 | size or offset. */ | |||
882 | ||||
883 | static tree | |||
884 | bits_from_bytes (tree x) | |||
885 | { | |||
886 | if (POLY_INT_CST_P (x)(1 > 1 && ((enum tree_code) (x)->base.code) == POLY_INT_CST )) | |||
887 | /* The runtime calculation isn't allowed to overflow sizetype; | |||
888 | increasing the runtime values must always increase the size | |||
889 | or offset of the object. This means that the object imposes | |||
890 | a maximum value on the runtime parameters, but we don't record | |||
891 | what that is. */ | |||
892 | return build_poly_int_cst | |||
893 | (bitsizetypesizetype_tab[(int) stk_bitsizetype], | |||
894 | poly_wide_int::from (poly_int_cst_value (x), | |||
895 | TYPE_PRECISION (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 895, __FUNCTION__))->type_common.precision), | |||
896 | TYPE_SIGN (TREE_TYPE (x))((signop) ((tree_class_check ((((contains_struct_check ((x), ( TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 896, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 896, __FUNCTION__))->base.u.bits.unsigned_flag)))); | |||
897 | x = fold_convert (bitsizetype, x)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_bitsizetype ], x); | |||
898 | gcc_checking_assert (x)((void)(!(x) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 898, __FUNCTION__), 0 : 0)); | |||
899 | return x; | |||
900 | } | |||
901 | ||||
902 | /* Return the combined bit position for the byte offset OFFSET and the | |||
903 | bit position BITPOS. | |||
904 | ||||
905 | These functions operate on byte and bit positions present in FIELD_DECLs | |||
906 | and assume that these expressions result in no (intermediate) overflow. | |||
907 | This assumption is necessary to fold the expressions as much as possible, | |||
908 | so as to avoid creating artificially variable-sized types in languages | |||
909 | supporting variable-sized types like Ada. */ | |||
910 | ||||
911 | tree | |||
912 | bit_from_pos (tree offset, tree bitpos) | |||
913 | { | |||
914 | return size_binop (PLUS_EXPR, bitpos,size_binop_loc (((location_t) 0), PLUS_EXPR, bitpos, size_binop_loc (((location_t) 0), MULT_EXPR, bits_from_bytes (offset), global_trees [TI_BITSIZE_UNIT])) | |||
915 | size_binop (MULT_EXPR, bits_from_bytes (offset),size_binop_loc (((location_t) 0), PLUS_EXPR, bitpos, size_binop_loc (((location_t) 0), MULT_EXPR, bits_from_bytes (offset), global_trees [TI_BITSIZE_UNIT])) | |||
916 | bitsize_unit_node))size_binop_loc (((location_t) 0), PLUS_EXPR, bitpos, size_binop_loc (((location_t) 0), MULT_EXPR, bits_from_bytes (offset), global_trees [TI_BITSIZE_UNIT])); | |||
917 | } | |||
918 | ||||
919 | /* Return the combined truncated byte position for the byte offset OFFSET and | |||
920 | the bit position BITPOS. */ | |||
921 | ||||
922 | tree | |||
923 | byte_from_pos (tree offset, tree bitpos) | |||
924 | { | |||
925 | tree bytepos; | |||
926 | if (TREE_CODE (bitpos)((enum tree_code) (bitpos)->base.code) == MULT_EXPR | |||
927 | && tree_int_cst_equal (TREE_OPERAND (bitpos, 1)(*((const_cast<tree*> (tree_operand_check ((bitpos), (1 ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 927, __FUNCTION__))))), bitsize_unit_nodeglobal_trees[TI_BITSIZE_UNIT])) | |||
928 | bytepos = TREE_OPERAND (bitpos, 0)(*((const_cast<tree*> (tree_operand_check ((bitpos), (0 ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 928, __FUNCTION__))))); | |||
929 | else | |||
930 | bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node)size_binop_loc (((location_t) 0), TRUNC_DIV_EXPR, bitpos, global_trees [TI_BITSIZE_UNIT]); | |||
931 | return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos))size_binop_loc (((location_t) 0), PLUS_EXPR, offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], bytepos )); | |||
932 | } | |||
933 | ||||
934 | /* Split the bit position POS into a byte offset *POFFSET and a bit | |||
935 | position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */ | |||
936 | ||||
937 | void | |||
938 | pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align, | |||
939 | tree pos) | |||
940 | { | |||
941 | tree toff_align = bitsize_int (off_align)size_int_kind (off_align, stk_bitsizetype); | |||
942 | if (TREE_CODE (pos)((enum tree_code) (pos)->base.code) == MULT_EXPR | |||
943 | && tree_int_cst_equal (TREE_OPERAND (pos, 1)(*((const_cast<tree*> (tree_operand_check ((pos), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 943, __FUNCTION__))))), toff_align)) | |||
944 | { | |||
945 | *poffset = size_binop (MULT_EXPR,size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], (*((const_cast <tree*> (tree_operand_check ((pos), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 946, __FUNCTION__)))))), size_int_kind (off_align / (8), stk_sizetype )) | |||
946 | fold_convert (sizetype, TREE_OPERAND (pos, 0)),size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], (*((const_cast <tree*> (tree_operand_check ((pos), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 946, __FUNCTION__)))))), size_int_kind (off_align / (8), stk_sizetype )) | |||
947 | size_int (off_align / BITS_PER_UNIT))size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], (*((const_cast <tree*> (tree_operand_check ((pos), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 946, __FUNCTION__)))))), size_int_kind (off_align / (8), stk_sizetype )); | |||
948 | *pbitpos = bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]; | |||
949 | } | |||
950 | else | |||
951 | { | |||
952 | *poffset = size_binop (MULT_EXPR,size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, pos, toff_align)), size_int_kind (off_align / (8), stk_sizetype)) | |||
953 | fold_convert (sizetype,size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, pos, toff_align)), size_int_kind (off_align / (8), stk_sizetype)) | |||
954 | size_binop (FLOOR_DIV_EXPR, pos,size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, pos, toff_align)), size_int_kind (off_align / (8), stk_sizetype)) | |||
955 | toff_align)),size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, pos, toff_align)), size_int_kind (off_align / (8), stk_sizetype)) | |||
956 | size_int (off_align / BITS_PER_UNIT))size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, pos, toff_align)), size_int_kind (off_align / (8), stk_sizetype)); | |||
957 | *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align)size_binop_loc (((location_t) 0), FLOOR_MOD_EXPR, pos, toff_align ); | |||
958 | } | |||
959 | } | |||
960 | ||||
961 | /* Given a pointer to bit and byte offsets and an offset alignment, | |||
962 | normalize the offsets so they are within the alignment. */ | |||
963 | ||||
964 | void | |||
965 | normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align) | |||
966 | { | |||
967 | /* If the bit position is now larger than it should be, adjust it | |||
968 | downwards. */ | |||
969 | if (compare_tree_int (*pbitpos, off_align) >= 0) | |||
970 | { | |||
971 | tree offset, bitpos; | |||
972 | pos_from_bit (&offset, &bitpos, off_align, *pbitpos); | |||
973 | *poffset = size_binop (PLUS_EXPR, *poffset, offset)size_binop_loc (((location_t) 0), PLUS_EXPR, *poffset, offset ); | |||
974 | *pbitpos = bitpos; | |||
975 | } | |||
976 | } | |||
977 | ||||
978 | /* Print debugging information about the information in RLI. */ | |||
979 | ||||
980 | DEBUG_FUNCTION__attribute__ ((__used__)) void | |||
981 | debug_rli (record_layout_info rli) | |||
982 | { | |||
983 | print_node_brief (stderrstderr, "type", rli->t, 0); | |||
984 | print_node_brief (stderrstderr, "\noffset", rli->offset, 0); | |||
985 | print_node_brief (stderrstderr, " bitpos", rli->bitpos, 0); | |||
986 | ||||
987 | fprintf (stderrstderr, "\naligns: rec = %u, unpack = %u, off = %u\n", | |||
988 | rli->record_align, rli->unpacked_align, | |||
989 | rli->offset_align); | |||
990 | ||||
991 | /* The ms_struct code is the only that uses this. */ | |||
992 | if (targetm.ms_bitfield_layout_p (rli->t)) | |||
993 | fprintf (stderrstderr, "remaining in alignment = %u\n", rli->remaining_in_alignment); | |||
994 | ||||
995 | if (rli->packed_maybe_necessary) | |||
996 | fprintf (stderrstderr, "packed may be necessary\n"); | |||
997 | ||||
998 | if (!vec_safe_is_empty (rli->pending_statics)) | |||
999 | { | |||
1000 | fprintf (stderrstderr, "pending statics:\n"); | |||
1001 | debug (rli->pending_statics); | |||
1002 | } | |||
1003 | } | |||
1004 | ||||
1005 | /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and | |||
1006 | BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */ | |||
1007 | ||||
1008 | void | |||
1009 | normalize_rli (record_layout_info rli) | |||
1010 | { | |||
1011 | normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align); | |||
1012 | } | |||
1013 | ||||
1014 | /* Returns the size in bytes allocated so far. */ | |||
1015 | ||||
1016 | tree | |||
1017 | rli_size_unit_so_far (record_layout_info rli) | |||
1018 | { | |||
1019 | return byte_from_pos (rli->offset, rli->bitpos); | |||
1020 | } | |||
1021 | ||||
1022 | /* Returns the size in bits allocated so far. */ | |||
1023 | ||||
1024 | tree | |||
1025 | rli_size_so_far (record_layout_info rli) | |||
1026 | { | |||
1027 | return bit_from_pos (rli->offset, rli->bitpos); | |||
1028 | } | |||
1029 | ||||
1030 | /* FIELD is about to be added to RLI->T. The alignment (in bits) of | |||
1031 | the next available location within the record is given by KNOWN_ALIGN. | |||
1032 | Update the variable alignment fields in RLI, and return the alignment | |||
1033 | to give the FIELD. */ | |||
1034 | ||||
1035 | unsigned int | |||
1036 | update_alignment_for_field (record_layout_info rli, tree field, | |||
1037 | unsigned int known_align) | |||
1038 | { | |||
1039 | /* The alignment required for FIELD. */ | |||
1040 | unsigned int desired_align; | |||
1041 | /* The type of this field. */ | |||
1042 | tree type = TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1042, __FUNCTION__))->typed.type); | |||
1043 | /* True if the field was explicitly aligned by the user. */ | |||
1044 | bool user_align; | |||
1045 | bool is_bitfield; | |||
1046 | ||||
1047 | /* Do not attempt to align an ERROR_MARK node */ | |||
1048 | if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ERROR_MARK) | |||
1049 | return 0; | |||
1050 | ||||
1051 | /* Lay out the field so we know what alignment it needs. */ | |||
1052 | layout_decl (field, known_align); | |||
1053 | desired_align = DECL_ALIGN (field)(((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1053, __FUNCTION__))->decl_common.align) ? ((unsigned)1) << (((contains_struct_check ((field), (TS_DECL_COMMON) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1053, __FUNCTION__))->decl_common.align) - 1) : 0); | |||
1054 | user_align = DECL_USER_ALIGN (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1054, __FUNCTION__))->base.u.bits.user_align); | |||
1055 | ||||
1056 | is_bitfield = (type != error_mark_nodeglobal_trees[TI_ERROR_MARK] | |||
1057 | && DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1057, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1058 | && ! integer_zerop (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1058, __FUNCTION__))->type_common.size))); | |||
1059 | ||||
1060 | /* Record must have at least as much alignment as any field. | |||
1061 | Otherwise, the alignment of the field within the record is | |||
1062 | meaningless. */ | |||
1063 | if (targetm.ms_bitfield_layout_p (rli->t)) | |||
1064 | { | |||
1065 | /* Here, the alignment of the underlying type of a bitfield can | |||
1066 | affect the alignment of a record; even a zero-sized field | |||
1067 | can do this. The alignment should be to the alignment of | |||
1068 | the type, except that for zero-size bitfields this only | |||
1069 | applies if there was an immediately prior, nonzero-size | |||
1070 | bitfield. (That's the way it is, experimentally.) */ | |||
1071 | if (!is_bitfield | |||
1072 | || ((DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1072, __FUNCTION__))->decl_common.size) == NULL_TREE(tree) nullptr | |||
1073 | || !integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1073, __FUNCTION__))->decl_common.size))) | |||
1074 | ? !DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1074, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag ) | |||
1075 | : (rli->prev_field | |||
1076 | && DECL_BIT_FIELD_TYPE (rli->prev_field)((tree_check ((rli->prev_field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1076, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1077 | && ! integer_zerop (DECL_SIZE (rli->prev_field)((contains_struct_check ((rli->prev_field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1077, __FUNCTION__))->decl_common.size))))) | |||
1078 | { | |||
1079 | unsigned int type_align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1079, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1079, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1080 | if (!is_bitfield && DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1080, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1081 | type_align = desired_align; | |||
1082 | else | |||
1083 | type_align = MAX (type_align, desired_align)((type_align) > (desired_align) ? (type_align) : (desired_align )); | |||
1084 | if (maximum_field_alignment != 0) | |||
1085 | type_align = MIN (type_align, maximum_field_alignment)((type_align) < (maximum_field_alignment) ? (type_align) : (maximum_field_alignment)); | |||
1086 | rli->record_align = MAX (rli->record_align, type_align)((rli->record_align) > (type_align) ? (rli->record_align ) : (type_align)); | |||
1087 | rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type))((rli->unpacked_align) > ((((tree_class_check ((type), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1087, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1087, __FUNCTION__))->type_common.align) - 1) : 0)) ? (rli ->unpacked_align) : ((((tree_class_check ((type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1087, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1087, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
1088 | } | |||
1089 | } | |||
1090 | else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS1) | |||
1091 | { | |||
1092 | /* Named bit-fields cause the entire structure to have the | |||
1093 | alignment implied by their type. Some targets also apply the same | |||
1094 | rules to unnamed bitfields. */ | |||
1095 | if (DECL_NAME (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1095, __FUNCTION__))->decl_minimal.name) != 0 | |||
1096 | || targetm.align_anon_bitfield ()) | |||
1097 | { | |||
1098 | unsigned int type_align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1098, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1098, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1099 | ||||
1100 | #ifdef ADJUST_FIELD_ALIGN | |||
1101 | if (! TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1101, __FUNCTION__))->base.u.bits.user_align)) | |||
1102 | type_align = ADJUST_FIELD_ALIGN (field, type, type_align)x86_field_alignment ((type), (type_align)); | |||
1103 | #endif | |||
1104 | ||||
1105 | /* Targets might chose to handle unnamed and hence possibly | |||
1106 | zero-width bitfield. Those are not influenced by #pragmas | |||
1107 | or packed attributes. */ | |||
1108 | if (integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1108, __FUNCTION__))->decl_common.size))) | |||
1109 | { | |||
1110 | if (initial_max_fld_alignglobal_options.x_initial_max_fld_align) | |||
1111 | type_align = MIN (type_align,((type_align) < (global_options.x_initial_max_fld_align * ( 8)) ? (type_align) : (global_options.x_initial_max_fld_align * (8))) | |||
1112 | initial_max_fld_align * BITS_PER_UNIT)((type_align) < (global_options.x_initial_max_fld_align * ( 8)) ? (type_align) : (global_options.x_initial_max_fld_align * (8))); | |||
1113 | } | |||
1114 | else if (maximum_field_alignment != 0) | |||
1115 | type_align = MIN (type_align, maximum_field_alignment)((type_align) < (maximum_field_alignment) ? (type_align) : (maximum_field_alignment)); | |||
1116 | else if (DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1116, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1117 | type_align = MIN (type_align, BITS_PER_UNIT)((type_align) < ((8)) ? (type_align) : ((8))); | |||
1118 | ||||
1119 | /* The alignment of the record is increased to the maximum | |||
1120 | of the current alignment, the alignment indicated on the | |||
1121 | field (i.e., the alignment specified by an __aligned__ | |||
1122 | attribute), and the alignment indicated by the type of | |||
1123 | the field. */ | |||
1124 | rli->record_align = MAX (rli->record_align, desired_align)((rli->record_align) > (desired_align) ? (rli->record_align ) : (desired_align)); | |||
1125 | rli->record_align = MAX (rli->record_align, type_align)((rli->record_align) > (type_align) ? (rli->record_align ) : (type_align)); | |||
1126 | ||||
1127 | if (warn_packedglobal_options.x_warn_packed) | |||
1128 | rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type))((rli->unpacked_align) > ((((tree_class_check ((type), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1128, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1128, __FUNCTION__))->type_common.align) - 1) : 0)) ? (rli ->unpacked_align) : ((((tree_class_check ((type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1128, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1128, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
1129 | user_align |= TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1129, __FUNCTION__))->base.u.bits.user_align); | |||
1130 | } | |||
1131 | } | |||
1132 | else | |||
1133 | { | |||
1134 | rli->record_align = MAX (rli->record_align, desired_align)((rli->record_align) > (desired_align) ? (rli->record_align ) : (desired_align)); | |||
1135 | rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type))((rli->unpacked_align) > ((((tree_class_check ((type), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1135, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1135, __FUNCTION__))->type_common.align) - 1) : 0)) ? (rli ->unpacked_align) : ((((tree_class_check ((type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1135, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1135, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
1136 | } | |||
1137 | ||||
1138 | TYPE_USER_ALIGN (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1138, __FUNCTION__))->base.u.bits.user_align) |= user_align; | |||
1139 | ||||
1140 | return desired_align; | |||
1141 | } | |||
1142 | ||||
1143 | /* Issue a warning if the record alignment, RECORD_ALIGN, is less than | |||
1144 | the field alignment of FIELD or FIELD isn't aligned. */ | |||
1145 | ||||
1146 | static void | |||
1147 | handle_warn_if_not_align (tree field, unsigned int record_align) | |||
1148 | { | |||
1149 | tree type = TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1149, __FUNCTION__))->typed.type); | |||
1150 | ||||
1151 | if (type == error_mark_nodeglobal_trees[TI_ERROR_MARK]) | |||
1152 | return; | |||
1153 | ||||
1154 | unsigned int warn_if_not_align = 0; | |||
1155 | ||||
1156 | int opt_w = 0; | |||
1157 | ||||
1158 | if (warn_if_not_alignedglobal_options.x_warn_if_not_aligned) | |||
1159 | { | |||
1160 | warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field)(((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1160, __FUNCTION__))->decl_common.warn_if_not_align) ? ( (unsigned)1) << (((contains_struct_check ((field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1160, __FUNCTION__))->decl_common.warn_if_not_align) - 1 ) : 0); | |||
1161 | if (!warn_if_not_align) | |||
1162 | warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1162, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((type)->type_common.warn_if_not_align - 1) : 0); | |||
1163 | if (warn_if_not_align) | |||
1164 | opt_w = OPT_Wif_not_aligned; | |||
1165 | } | |||
1166 | ||||
1167 | if (!warn_if_not_align | |||
1168 | && warn_packed_not_alignedglobal_options.x_warn_packed_not_aligned | |||
1169 | && lookup_attribute ("aligned", TYPE_ATTRIBUTES (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1169, __FUNCTION__))->type_common.attributes))) | |||
1170 | { | |||
1171 | warn_if_not_align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1171, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1171, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1172 | opt_w = OPT_Wpacked_not_aligned; | |||
1173 | } | |||
1174 | ||||
1175 | if (!warn_if_not_align) | |||
1176 | return; | |||
1177 | ||||
1178 | tree context = DECL_CONTEXT (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1178, __FUNCTION__))->decl_minimal.context); | |||
1179 | ||||
1180 | warn_if_not_align /= BITS_PER_UNIT(8); | |||
1181 | record_align /= BITS_PER_UNIT(8); | |||
1182 | if ((record_align % warn_if_not_align) != 0) | |||
1183 | warning (opt_w, "alignment %u of %qT is less than %u", | |||
1184 | record_align, context, warn_if_not_align); | |||
1185 | ||||
1186 | tree off = byte_position (field); | |||
1187 | if (!multiple_of_p (TREE_TYPE (off)((contains_struct_check ((off), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1187, __FUNCTION__))->typed.type), off, size_int (warn_if_not_align)size_int_kind (warn_if_not_align, stk_sizetype))) | |||
1188 | { | |||
1189 | if (TREE_CODE (off)((enum tree_code) (off)->base.code) == INTEGER_CST) | |||
1190 | warning (opt_w, "%q+D offset %E in %qT isn%'t aligned to %u", | |||
1191 | field, off, context, warn_if_not_align); | |||
1192 | else | |||
1193 | warning (opt_w, "%q+D offset %E in %qT may not be aligned to %u", | |||
1194 | field, off, context, warn_if_not_align); | |||
1195 | } | |||
1196 | } | |||
1197 | ||||
1198 | /* Called from place_field to handle unions. */ | |||
1199 | ||||
1200 | static void | |||
1201 | place_union_field (record_layout_info rli, tree field) | |||
1202 | { | |||
1203 | update_alignment_for_field (rli, field, /*known_align=*/0); | |||
1204 | ||||
1205 | DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1205, __FUNCTION__, (FIELD_DECL)))->field_decl.offset) = size_zero_nodeglobal_trees[TI_SIZE_ZERO]; | |||
1206 | DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1206, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ) = bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]; | |||
1207 | SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1207, __FUNCTION__, (FIELD_DECL)))->decl_common.off_align = ffs_hwi ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & ( 1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) - 1); | |||
1208 | handle_warn_if_not_align (field, rli->record_align); | |||
1209 | ||||
1210 | /* If this is an ERROR_MARK return *after* having set the | |||
1211 | field at the start of the union. This helps when parsing | |||
1212 | invalid fields. */ | |||
1213 | if (TREE_CODE (TREE_TYPE (field))((enum tree_code) (((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1213, __FUNCTION__))->typed.type))->base.code) == ERROR_MARK) | |||
1214 | return; | |||
1215 | ||||
1216 | if (AGGREGATE_TYPE_P (TREE_TYPE (field))(((enum tree_code) (((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1216, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE || (((enum tree_code) (((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1216, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE || ((enum tree_code) (((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1216, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE || ((enum tree_code) (((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1216, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE )) | |||
1217 | && TYPE_TYPELESS_STORAGE (TREE_TYPE (field))((tree_check4 ((((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1217, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1217, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage)) | |||
1218 | TYPE_TYPELESS_STORAGE (rli->t)((tree_check4 ((rli->t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1218, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage) = 1; | |||
1219 | ||||
1220 | /* We assume the union's size will be a multiple of a byte so we don't | |||
1221 | bother with BITPOS. */ | |||
1222 | if (TREE_CODE (rli->t)((enum tree_code) (rli->t)->base.code) == UNION_TYPE) | |||
1223 | rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field))size_binop_loc (((location_t) 0), MAX_EXPR, rli->offset, ( (contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1223, __FUNCTION__))->decl_common.size_unit)); | |||
1224 | else if (TREE_CODE (rli->t)((enum tree_code) (rli->t)->base.code) == QUAL_UNION_TYPE) | |||
1225 | rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),fold_build3_loc (((location_t) 0), COND_EXPR, sizetype_tab[(int ) stk_sizetype], ((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1225, __FUNCTION__, (FIELD_DECL)))->field_decl.qualifier ), ((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1226, __FUNCTION__))->decl_common.size_unit), rli->offset ) | |||
1226 | DECL_SIZE_UNIT (field), rli->offset)fold_build3_loc (((location_t) 0), COND_EXPR, sizetype_tab[(int ) stk_sizetype], ((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1225, __FUNCTION__, (FIELD_DECL)))->field_decl.qualifier ), ((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1226, __FUNCTION__))->decl_common.size_unit), rli->offset ); | |||
1227 | } | |||
1228 | ||||
1229 | /* A bitfield of SIZE with a required access alignment of ALIGN is allocated | |||
1230 | at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more | |||
1231 | units of alignment than the underlying TYPE. */ | |||
1232 | static int | |||
1233 | excess_unit_span (HOST_WIDE_INTlong byte_offset, HOST_WIDE_INTlong bit_offset, | |||
1234 | HOST_WIDE_INTlong size, HOST_WIDE_INTlong align, tree type) | |||
1235 | { | |||
1236 | /* Note that the calculation of OFFSET might overflow; we calculate it so | |||
1237 | that we still get the right result as long as ALIGN is a power of two. */ | |||
1238 | unsigned HOST_WIDE_INTlong offset = byte_offset * BITS_PER_UNIT(8) + bit_offset; | |||
1239 | ||||
1240 | offset = offset % align; | |||
1241 | return ((offset + size + align - 1) / align | |||
1242 | > tree_to_uhwi (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1242, __FUNCTION__))->type_common.size)) / align); | |||
1243 | } | |||
1244 | ||||
1245 | /* RLI contains information about the layout of a RECORD_TYPE. FIELD | |||
1246 | is a FIELD_DECL to be added after those fields already present in | |||
1247 | T. (FIELD is not actually added to the TYPE_FIELDS list here; | |||
1248 | callers that desire that behavior must manually perform that step.) */ | |||
1249 | ||||
1250 | void | |||
1251 | place_field (record_layout_info rli, tree field) | |||
1252 | { | |||
1253 | /* The alignment required for FIELD. */ | |||
1254 | unsigned int desired_align; | |||
1255 | /* The alignment FIELD would have if we just dropped it into the | |||
1256 | record as it presently stands. */ | |||
1257 | unsigned int known_align; | |||
1258 | unsigned int actual_align; | |||
1259 | /* The type of this field. */ | |||
1260 | tree type = TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1260, __FUNCTION__))->typed.type); | |||
1261 | ||||
1262 | gcc_assert (TREE_CODE (field) != ERROR_MARK)((void)(!(((enum tree_code) (field)->base.code) != ERROR_MARK ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1262, __FUNCTION__), 0 : 0)); | |||
1263 | ||||
1264 | /* If FIELD is static, then treat it like a separate variable, not | |||
1265 | really like a structure field. If it is a FUNCTION_DECL, it's a | |||
1266 | method. In both cases, all we do is lay out the decl, and we do | |||
1267 | it *after* the record is laid out. */ | |||
1268 | if (VAR_P (field)(((enum tree_code) (field)->base.code) == VAR_DECL)) | |||
1269 | { | |||
1270 | vec_safe_push (rli->pending_statics, field); | |||
1271 | return; | |||
1272 | } | |||
1273 | ||||
1274 | /* Enumerators and enum types which are local to this class need not | |||
1275 | be laid out. Likewise for initialized constant fields. */ | |||
1276 | else if (TREE_CODE (field)((enum tree_code) (field)->base.code) != FIELD_DECL) | |||
1277 | return; | |||
1278 | ||||
1279 | /* Unions are laid out very differently than records, so split | |||
1280 | that code off to another function. */ | |||
1281 | else if (TREE_CODE (rli->t)((enum tree_code) (rli->t)->base.code) != RECORD_TYPE) | |||
1282 | { | |||
1283 | place_union_field (rli, field); | |||
1284 | return; | |||
1285 | } | |||
1286 | ||||
1287 | else if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ERROR_MARK) | |||
1288 | { | |||
1289 | /* Place this field at the current allocation position, so we | |||
1290 | maintain monotonicity. */ | |||
1291 | DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1291, __FUNCTION__, (FIELD_DECL)))->field_decl.offset) = rli->offset; | |||
1292 | DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1292, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ) = rli->bitpos; | |||
1293 | SET_DECL_OFFSET_ALIGN (field, rli->offset_align)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1293, __FUNCTION__, (FIELD_DECL)))->decl_common.off_align = ffs_hwi (rli->offset_align) - 1); | |||
1294 | handle_warn_if_not_align (field, rli->record_align); | |||
1295 | return; | |||
1296 | } | |||
1297 | ||||
1298 | if (AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)) | |||
1299 | && TYPE_TYPELESS_STORAGE (type)((tree_check4 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1299, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage)) | |||
1300 | TYPE_TYPELESS_STORAGE (rli->t)((tree_check4 ((rli->t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1300, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage) = 1; | |||
1301 | ||||
1302 | /* Work out the known alignment so far. Note that A & (-A) is the | |||
1303 | value of the least-significant bit in A that is one. */ | |||
1304 | if (! integer_zerop (rli->bitpos)) | |||
1305 | known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos)); | |||
1306 | else if (integer_zerop (rli->offset)) | |||
1307 | known_align = 0; | |||
1308 | else if (tree_fits_uhwi_p (rli->offset)) | |||
1309 | known_align = (BITS_PER_UNIT(8) | |||
1310 | * least_bit_hwi (tree_to_uhwi (rli->offset))); | |||
1311 | else | |||
1312 | known_align = rli->offset_align; | |||
1313 | ||||
1314 | desired_align = update_alignment_for_field (rli, field, known_align); | |||
1315 | if (known_align == 0) | |||
1316 | known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align)(((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128)))) > (rli->record_align ) ? ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128)))) : (rli->record_align )); | |||
1317 | ||||
1318 | if (warn_packedglobal_options.x_warn_packed && DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1318, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1319 | { | |||
1320 | if (known_align >= TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1320, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1320, __FUNCTION__))->type_common.align) - 1) : 0)) | |||
1321 | { | |||
1322 | if (TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1322, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1322, __FUNCTION__))->type_common.align) - 1) : 0) > desired_align) | |||
1323 | { | |||
1324 | if (STRICT_ALIGNMENT0) | |||
1325 | warning (OPT_Wattributes, "packed attribute causes " | |||
1326 | "inefficient alignment for %q+D", field); | |||
1327 | /* Don't warn if DECL_PACKED was set by the type. */ | |||
1328 | else if (!TYPE_PACKED (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1328, __FUNCTION__))->base.u.bits.packed_flag)) | |||
1329 | warning (OPT_Wattributes, "packed attribute is " | |||
1330 | "unnecessary for %q+D", field); | |||
1331 | } | |||
1332 | } | |||
1333 | else | |||
1334 | rli->packed_maybe_necessary = 1; | |||
1335 | } | |||
1336 | ||||
1337 | /* Does this field automatically have alignment it needs by virtue | |||
1338 | of the fields that precede it and the record's own alignment? */ | |||
1339 | if (known_align < desired_align | |||
1340 | && (! targetm.ms_bitfield_layout_p (rli->t) | |||
1341 | || rli->prev_field == NULLnullptr)) | |||
1342 | { | |||
1343 | /* No, we need to skip space before this field. | |||
1344 | Bump the cumulative size to multiple of field alignment. */ | |||
1345 | ||||
1346 | if (!targetm.ms_bitfield_layout_p (rli->t) | |||
1347 | && DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1347, __FUNCTION__))->decl_minimal.locus) != BUILTINS_LOCATION((location_t) 1) | |||
1348 | && !TYPE_ARTIFICIAL (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1348, __FUNCTION__))->base.nowarning_flag)) | |||
1349 | warning (OPT_Wpadded, "padding struct to align %q+D", field); | |||
1350 | ||||
1351 | /* If the alignment is still within offset_align, just align | |||
1352 | the bit position. */ | |||
1353 | if (desired_align < rli->offset_align) | |||
1354 | rli->bitpos = round_up (rli->bitpos, desired_align)round_up_loc (((location_t) 0), rli->bitpos, desired_align ); | |||
1355 | else | |||
1356 | { | |||
1357 | /* First adjust OFFSET by the partial bits, then align. */ | |||
1358 | rli->offset | |||
1359 | = size_binop (PLUS_EXPR, rli->offset,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))) | |||
1360 | fold_convert (sizetype,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))) | |||
1361 | size_binop (CEIL_DIV_EXPR, rli->bitpos,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))) | |||
1362 | bitsize_unit_node)))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))); | |||
1363 | rli->bitpos = bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]; | |||
1364 | ||||
1365 | rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT)round_up_loc (((location_t) 0), rli->offset, desired_align / (8)); | |||
1366 | } | |||
1367 | ||||
1368 | if (! TREE_CONSTANT (rli->offset)((non_type_check ((rli->offset), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1368, __FUNCTION__))->base.constant_flag)) | |||
1369 | rli->offset_align = desired_align; | |||
1370 | } | |||
1371 | ||||
1372 | /* Handle compatibility with PCC. Note that if the record has any | |||
1373 | variable-sized fields, we need not worry about compatibility. */ | |||
1374 | if (PCC_BITFIELD_TYPE_MATTERS1 | |||
1375 | && ! targetm.ms_bitfield_layout_p (rli->t) | |||
1376 | && TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL | |||
1377 | && type != error_mark_nodeglobal_trees[TI_ERROR_MARK] | |||
1378 | && DECL_BIT_FIELD (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1378, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1 ) | |||
1379 | && (! DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1379, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag ) | |||
1380 | /* Enter for these packed fields only to issue a warning. */ | |||
1381 | || TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1381, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1381, __FUNCTION__))->type_common.align) - 1) : 0) <= BITS_PER_UNIT(8)) | |||
1382 | && maximum_field_alignment == 0 | |||
1383 | && ! integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1383, __FUNCTION__))->decl_common.size)) | |||
1384 | && tree_fits_uhwi_p (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1384, __FUNCTION__))->decl_common.size)) | |||
1385 | && tree_fits_uhwi_p (rli->offset) | |||
1386 | && tree_fits_uhwi_p (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1386, __FUNCTION__))->type_common.size))) | |||
1387 | { | |||
1388 | unsigned int type_align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1388, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1388, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1389 | tree dsize = DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1389, __FUNCTION__))->decl_common.size); | |||
1390 | HOST_WIDE_INTlong field_size = tree_to_uhwi (dsize); | |||
1391 | HOST_WIDE_INTlong offset = tree_to_uhwi (rli->offset); | |||
1392 | HOST_WIDE_INTlong bit_offset = tree_to_shwi (rli->bitpos); | |||
1393 | ||||
1394 | #ifdef ADJUST_FIELD_ALIGN | |||
1395 | if (! TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1395, __FUNCTION__))->base.u.bits.user_align)) | |||
1396 | type_align = ADJUST_FIELD_ALIGN (field, type, type_align)x86_field_alignment ((type), (type_align)); | |||
1397 | #endif | |||
1398 | ||||
1399 | /* A bit field may not span more units of alignment of its type | |||
1400 | than its type itself. Advance to next boundary if necessary. */ | |||
1401 | if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) | |||
1402 | { | |||
1403 | if (DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1403, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1404 | { | |||
1405 | if (warn_packed_bitfield_compatglobal_options.x_warn_packed_bitfield_compat == 1) | |||
1406 | inform | |||
1407 | (input_location, | |||
1408 | "offset of packed bit-field %qD has changed in GCC 4.4", | |||
1409 | field); | |||
1410 | } | |||
1411 | else | |||
1412 | rli->bitpos = round_up (rli->bitpos, type_align)round_up_loc (((location_t) 0), rli->bitpos, type_align); | |||
1413 | } | |||
1414 | ||||
1415 | if (! DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1415, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1416 | TYPE_USER_ALIGN (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1416, __FUNCTION__))->base.u.bits.user_align) |= TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1416, __FUNCTION__))->base.u.bits.user_align); | |||
1417 | ||||
1418 | SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1419, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1419, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((type)->type_common.warn_if_not_align - 1) : 0))) | |||
1419 | TYPE_WARN_IF_NOT_ALIGN (type))((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1419, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1419, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((type)->type_common.warn_if_not_align - 1) : 0))); | |||
1420 | } | |||
1421 | ||||
1422 | #ifdef BITFIELD_NBYTES_LIMITED | |||
1423 | if (BITFIELD_NBYTES_LIMITED | |||
1424 | && ! targetm.ms_bitfield_layout_p (rli->t) | |||
1425 | && TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL | |||
1426 | && type != error_mark_nodeglobal_trees[TI_ERROR_MARK] | |||
1427 | && DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1427, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1428 | && ! DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1428, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag ) | |||
1429 | && ! integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1429, __FUNCTION__))->decl_common.size)) | |||
1430 | && tree_fits_uhwi_p (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1430, __FUNCTION__))->decl_common.size)) | |||
1431 | && tree_fits_uhwi_p (rli->offset) | |||
1432 | && tree_fits_uhwi_p (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1432, __FUNCTION__))->type_common.size))) | |||
1433 | { | |||
1434 | unsigned int type_align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1434, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1434, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1435 | tree dsize = DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1435, __FUNCTION__))->decl_common.size); | |||
1436 | HOST_WIDE_INTlong field_size = tree_to_uhwi (dsize); | |||
1437 | HOST_WIDE_INTlong offset = tree_to_uhwi (rli->offset); | |||
1438 | HOST_WIDE_INTlong bit_offset = tree_to_shwi (rli->bitpos); | |||
1439 | ||||
1440 | #ifdef ADJUST_FIELD_ALIGN | |||
1441 | if (! TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1441, __FUNCTION__))->base.u.bits.user_align)) | |||
1442 | type_align = ADJUST_FIELD_ALIGN (field, type, type_align)x86_field_alignment ((type), (type_align)); | |||
1443 | #endif | |||
1444 | ||||
1445 | if (maximum_field_alignment != 0) | |||
1446 | type_align = MIN (type_align, maximum_field_alignment)((type_align) < (maximum_field_alignment) ? (type_align) : (maximum_field_alignment)); | |||
1447 | /* ??? This test is opposite the test in the containing if | |||
1448 | statement, so this code is unreachable currently. */ | |||
1449 | else if (DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1449, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1450 | type_align = MIN (type_align, BITS_PER_UNIT)((type_align) < ((8)) ? (type_align) : ((8))); | |||
1451 | ||||
1452 | /* A bit field may not span the unit of alignment of its type. | |||
1453 | Advance to next boundary if necessary. */ | |||
1454 | if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) | |||
1455 | rli->bitpos = round_up (rli->bitpos, type_align)round_up_loc (((location_t) 0), rli->bitpos, type_align); | |||
1456 | ||||
1457 | TYPE_USER_ALIGN (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1457, __FUNCTION__))->base.u.bits.user_align) |= TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1457, __FUNCTION__))->base.u.bits.user_align); | |||
1458 | SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1459, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1459, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((type)->type_common.warn_if_not_align - 1) : 0))) | |||
1459 | TYPE_WARN_IF_NOT_ALIGN (type))((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1459, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1459, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((type)->type_common.warn_if_not_align - 1) : 0))); | |||
1460 | } | |||
1461 | #endif | |||
1462 | ||||
1463 | /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details. | |||
1464 | A subtlety: | |||
1465 | When a bit field is inserted into a packed record, the whole | |||
1466 | size of the underlying type is used by one or more same-size | |||
1467 | adjacent bitfields. (That is, if its long:3, 32 bits is | |||
1468 | used in the record, and any additional adjacent long bitfields are | |||
1469 | packed into the same chunk of 32 bits. However, if the size | |||
1470 | changes, a new field of that size is allocated.) In an unpacked | |||
1471 | record, this is the same as using alignment, but not equivalent | |||
1472 | when packing. | |||
1473 | ||||
1474 | Note: for compatibility, we use the type size, not the type alignment | |||
1475 | to determine alignment, since that matches the documentation */ | |||
1476 | ||||
1477 | if (targetm.ms_bitfield_layout_p (rli->t)) | |||
1478 | { | |||
1479 | tree prev_saved = rli->prev_field; | |||
1480 | tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved)((tree_check ((prev_saved), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1480, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) : NULLnullptr; | |||
1481 | ||||
1482 | /* This is a bitfield if it exists. */ | |||
1483 | if (rli->prev_field) | |||
1484 | { | |||
1485 | bool realign_p = known_align < desired_align; | |||
1486 | ||||
1487 | /* If both are bitfields, nonzero, and the same size, this is | |||
1488 | the middle of a run. Zero declared size fields are special | |||
1489 | and handled as "end of run". (Note: it's nonzero declared | |||
1490 | size, but equal type sizes!) (Since we know that both | |||
1491 | the current and previous fields are bitfields by the | |||
1492 | time we check it, DECL_SIZE must be present for both.) */ | |||
1493 | if (DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1493, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1494 | && !integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1494, __FUNCTION__))->decl_common.size)) | |||
1495 | && !integer_zerop (DECL_SIZE (rli->prev_field)((contains_struct_check ((rli->prev_field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1495, __FUNCTION__))->decl_common.size)) | |||
1496 | && tree_fits_shwi_p (DECL_SIZE (rli->prev_field)((contains_struct_check ((rli->prev_field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1496, __FUNCTION__))->decl_common.size)) | |||
1497 | && tree_fits_uhwi_p (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1497, __FUNCTION__))->type_common.size)) | |||
1498 | && simple_cst_equal (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1498, __FUNCTION__))->type_common.size), TYPE_SIZE (prev_type)((tree_class_check ((prev_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1498, __FUNCTION__))->type_common.size))) | |||
1499 | { | |||
1500 | /* We're in the middle of a run of equal type size fields; make | |||
1501 | sure we realign if we run out of bits. (Not decl size, | |||
1502 | type size!) */ | |||
1503 | HOST_WIDE_INTlong bitsize = tree_to_uhwi (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1503, __FUNCTION__))->decl_common.size)); | |||
1504 | ||||
1505 | if (rli->remaining_in_alignment < bitsize) | |||
1506 | { | |||
1507 | HOST_WIDE_INTlong typesize = tree_to_uhwi (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1507, __FUNCTION__))->type_common.size)); | |||
1508 | ||||
1509 | /* out of bits; bump up to next 'word'. */ | |||
1510 | rli->bitpos | |||
1511 | = size_binop (PLUS_EXPR, rli->bitpos,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, size_int_kind (rli->remaining_in_alignment, stk_bitsizetype)) | |||
1512 | bitsize_int (rli->remaining_in_alignment))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, size_int_kind (rli->remaining_in_alignment, stk_bitsizetype)); | |||
1513 | rli->prev_field = field; | |||
1514 | if (typesize < bitsize) | |||
1515 | rli->remaining_in_alignment = 0; | |||
1516 | else | |||
1517 | rli->remaining_in_alignment = typesize - bitsize; | |||
1518 | } | |||
1519 | else | |||
1520 | { | |||
1521 | rli->remaining_in_alignment -= bitsize; | |||
1522 | realign_p = false; | |||
1523 | } | |||
1524 | } | |||
1525 | else | |||
1526 | { | |||
1527 | /* End of a run: if leaving a run of bitfields of the same type | |||
1528 | size, we have to "use up" the rest of the bits of the type | |||
1529 | size. | |||
1530 | ||||
1531 | Compute the new position as the sum of the size for the prior | |||
1532 | type and where we first started working on that type. | |||
1533 | Note: since the beginning of the field was aligned then | |||
1534 | of course the end will be too. No round needed. */ | |||
1535 | ||||
1536 | if (!integer_zerop (DECL_SIZE (rli->prev_field)((contains_struct_check ((rli->prev_field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1536, __FUNCTION__))->decl_common.size))) | |||
1537 | { | |||
1538 | rli->bitpos | |||
1539 | = size_binop (PLUS_EXPR, rli->bitpos,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, size_int_kind (rli->remaining_in_alignment, stk_bitsizetype)) | |||
1540 | bitsize_int (rli->remaining_in_alignment))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, size_int_kind (rli->remaining_in_alignment, stk_bitsizetype)); | |||
1541 | } | |||
1542 | else | |||
1543 | /* We "use up" size zero fields; the code below should behave | |||
1544 | as if the prior field was not a bitfield. */ | |||
1545 | prev_saved = NULLnullptr; | |||
1546 | ||||
1547 | /* Cause a new bitfield to be captured, either this time (if | |||
1548 | currently a bitfield) or next time we see one. */ | |||
1549 | if (!DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1549, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1550 | || integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1550, __FUNCTION__))->decl_common.size))) | |||
1551 | rli->prev_field = NULLnullptr; | |||
1552 | } | |||
1553 | ||||
1554 | /* Does this field automatically have alignment it needs by virtue | |||
1555 | of the fields that precede it and the record's own alignment? */ | |||
1556 | if (realign_p) | |||
1557 | { | |||
1558 | /* If the alignment is still within offset_align, just align | |||
1559 | the bit position. */ | |||
1560 | if (desired_align < rli->offset_align) | |||
1561 | rli->bitpos = round_up (rli->bitpos, desired_align)round_up_loc (((location_t) 0), rli->bitpos, desired_align ); | |||
1562 | else | |||
1563 | { | |||
1564 | /* First adjust OFFSET by the partial bits, then align. */ | |||
1565 | tree d = size_binop (CEIL_DIV_EXPR, rli->bitpos,size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos , global_trees[TI_BITSIZE_UNIT]) | |||
1566 | bitsize_unit_node)size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos , global_trees[TI_BITSIZE_UNIT]); | |||
1567 | rli->offset = size_binop (PLUS_EXPR, rli->offset,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], d)) | |||
1568 | fold_convert (sizetype, d))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], d)); | |||
1569 | rli->bitpos = bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]; | |||
1570 | ||||
1571 | rli->offset = round_up (rli->offset,round_up_loc (((location_t) 0), rli->offset, desired_align / (8)) | |||
1572 | desired_align / BITS_PER_UNIT)round_up_loc (((location_t) 0), rli->offset, desired_align / (8)); | |||
1573 | } | |||
1574 | ||||
1575 | if (! TREE_CONSTANT (rli->offset)((non_type_check ((rli->offset), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1575, __FUNCTION__))->base.constant_flag)) | |||
1576 | rli->offset_align = desired_align; | |||
1577 | } | |||
1578 | ||||
1579 | normalize_rli (rli); | |||
1580 | } | |||
1581 | ||||
1582 | /* If we're starting a new run of same type size bitfields | |||
1583 | (or a run of non-bitfields), set up the "first of the run" | |||
1584 | fields. | |||
1585 | ||||
1586 | That is, if the current field is not a bitfield, or if there | |||
1587 | was a prior bitfield the type sizes differ, or if there wasn't | |||
1588 | a prior bitfield the size of the current field is nonzero. | |||
1589 | ||||
1590 | Note: we must be sure to test ONLY the type size if there was | |||
1591 | a prior bitfield and ONLY for the current field being zero if | |||
1592 | there wasn't. */ | |||
1593 | ||||
1594 | if (!DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1594, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1595 | || (prev_saved != NULLnullptr | |||
1596 | ? !simple_cst_equal (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1596, __FUNCTION__))->type_common.size), TYPE_SIZE (prev_type)((tree_class_check ((prev_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1596, __FUNCTION__))->type_common.size)) | |||
1597 | : !integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1597, __FUNCTION__))->decl_common.size)))) | |||
1598 | { | |||
1599 | /* Never smaller than a byte for compatibility. */ | |||
1600 | unsigned int type_align = BITS_PER_UNIT(8); | |||
1601 | ||||
1602 | /* (When not a bitfield), we could be seeing a flex array (with | |||
1603 | no DECL_SIZE). Since we won't be using remaining_in_alignment | |||
1604 | until we see a bitfield (and come by here again) we just skip | |||
1605 | calculating it. */ | |||
1606 | if (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1606, __FUNCTION__))->decl_common.size) != NULLnullptr | |||
1607 | && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field))((tree_class_check ((((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1607, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1607, __FUNCTION__))->type_common.size)) | |||
1608 | && tree_fits_uhwi_p (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1608, __FUNCTION__))->decl_common.size))) | |||
1609 | { | |||
1610 | unsigned HOST_WIDE_INTlong bitsize | |||
1611 | = tree_to_uhwi (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1611, __FUNCTION__))->decl_common.size)); | |||
1612 | unsigned HOST_WIDE_INTlong typesize | |||
1613 | = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field))((tree_class_check ((((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1613, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1613, __FUNCTION__))->type_common.size)); | |||
1614 | ||||
1615 | if (typesize < bitsize) | |||
1616 | rli->remaining_in_alignment = 0; | |||
1617 | else | |||
1618 | rli->remaining_in_alignment = typesize - bitsize; | |||
1619 | } | |||
1620 | ||||
1621 | /* Now align (conventionally) for the new type. */ | |||
1622 | if (! DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1622, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag )) | |||
1623 | type_align = TYPE_ALIGN (TREE_TYPE (field))(((tree_class_check ((((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1623, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1623, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((((contains_struct_check ((field ), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1623, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1623, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1624 | ||||
1625 | if (maximum_field_alignment != 0) | |||
1626 | type_align = MIN (type_align, maximum_field_alignment)((type_align) < (maximum_field_alignment) ? (type_align) : (maximum_field_alignment)); | |||
1627 | ||||
1628 | rli->bitpos = round_up (rli->bitpos, type_align)round_up_loc (((location_t) 0), rli->bitpos, type_align); | |||
1629 | ||||
1630 | /* If we really aligned, don't allow subsequent bitfields | |||
1631 | to undo that. */ | |||
1632 | rli->prev_field = NULLnullptr; | |||
1633 | } | |||
1634 | } | |||
1635 | ||||
1636 | /* Offset so far becomes the position of this field after normalizing. */ | |||
1637 | normalize_rli (rli); | |||
1638 | DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1638, __FUNCTION__, (FIELD_DECL)))->field_decl.offset) = rli->offset; | |||
1639 | DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1639, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ) = rli->bitpos; | |||
1640 | SET_DECL_OFFSET_ALIGN (field, rli->offset_align)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1640, __FUNCTION__, (FIELD_DECL)))->decl_common.off_align = ffs_hwi (rli->offset_align) - 1); | |||
1641 | handle_warn_if_not_align (field, rli->record_align); | |||
1642 | ||||
1643 | /* Evaluate nonconstant offsets only once, either now or as soon as safe. */ | |||
1644 | if (TREE_CODE (DECL_FIELD_OFFSET (field))((enum tree_code) (((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1644, __FUNCTION__, (FIELD_DECL)))->field_decl.offset))-> base.code) != INTEGER_CST) | |||
1645 | DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1645, __FUNCTION__, (FIELD_DECL)))->field_decl.offset) = variable_size (DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1645, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)); | |||
1646 | ||||
1647 | /* If this field ended up more aligned than we thought it would be (we | |||
1648 | approximate this by seeing if its position changed), lay out the field | |||
1649 | again; perhaps we can use an integral mode for it now. */ | |||
1650 | if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1650, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ))) | |||
1651 | actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1651, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ))); | |||
1652 | else if (integer_zerop (DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1652, __FUNCTION__, (FIELD_DECL)))->field_decl.offset))) | |||
1653 | actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align)(((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128)))) > (rli->record_align ) ? ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128)))) : (rli->record_align )); | |||
1654 | else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1654, __FUNCTION__, (FIELD_DECL)))->field_decl.offset))) | |||
1655 | actual_align = (BITS_PER_UNIT(8) | |||
1656 | * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1656, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)))); | |||
1657 | else | |||
1658 | actual_align = DECL_OFFSET_ALIGN (field)(((unsigned long)1) << (tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1658, __FUNCTION__, (FIELD_DECL)))->decl_common.off_align ); | |||
1659 | /* ACTUAL_ALIGN is still the actual alignment *within the record* . | |||
1660 | store / extract bit field operations will check the alignment of the | |||
1661 | record against the mode of bit fields. */ | |||
1662 | ||||
1663 | if (known_align != actual_align) | |||
1664 | layout_decl (field, actual_align); | |||
1665 | ||||
1666 | if (rli->prev_field == NULLnullptr && DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1666, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type )) | |||
1667 | rli->prev_field = field; | |||
1668 | ||||
1669 | /* Now add size of this field to the size of the record. If the size is | |||
1670 | not constant, treat the field as being a multiple of bytes and just | |||
1671 | adjust the offset, resetting the bit position. Otherwise, apportion the | |||
1672 | size amongst the bit position and offset. First handle the case of an | |||
1673 | unspecified size, which can happen when we have an invalid nested struct | |||
1674 | definition, such as struct j { struct j { int i; } }. The error message | |||
1675 | is printed in finish_struct. */ | |||
1676 | if (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1676, __FUNCTION__))->decl_common.size) == 0) | |||
1677 | /* Do nothing. */; | |||
1678 | else if (TREE_CODE (DECL_SIZE (field))((enum tree_code) (((contains_struct_check ((field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1678, __FUNCTION__))->decl_common.size))->base.code) != INTEGER_CST | |||
1679 | || TREE_OVERFLOW (DECL_SIZE (field))((tree_class_check ((((contains_struct_check ((field), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1679, __FUNCTION__))->decl_common.size)), (tcc_constant) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1679, __FUNCTION__))->base.public_flag)) | |||
1680 | { | |||
1681 | rli->offset | |||
1682 | = size_binop (PLUS_EXPR, rli->offset,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))) | |||
1683 | fold_convert (sizetype,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))) | |||
1684 | size_binop (CEIL_DIV_EXPR, rli->bitpos,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))) | |||
1685 | bitsize_unit_node)))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype], size_binop_loc (((location_t) 0), CEIL_DIV_EXPR, rli->bitpos, global_trees [TI_BITSIZE_UNIT]))); | |||
1686 | rli->offset | |||
1687 | = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->offset, ( (contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1687, __FUNCTION__))->decl_common.size_unit)); | |||
1688 | rli->bitpos = bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]; | |||
1689 | rli->offset_align = MIN (rli->offset_align, desired_align)((rli->offset_align) < (desired_align) ? (rli->offset_align ) : (desired_align)); | |||
1690 | ||||
1691 | if (!multiple_of_p (bitsizetypesizetype_tab[(int) stk_bitsizetype], DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1691, __FUNCTION__))->decl_common.size), | |||
1692 | bitsize_int (rli->offset_align)size_int_kind (rli->offset_align, stk_bitsizetype))) | |||
1693 | { | |||
1694 | tree type = strip_array_types (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1694, __FUNCTION__))->typed.type)); | |||
1695 | /* The above adjusts offset_align just based on the start of the | |||
1696 | field. The field might not have a size that is a multiple of | |||
1697 | that offset_align though. If the field is an array of fixed | |||
1698 | sized elements, assume there can be any multiple of those | |||
1699 | sizes. If it is a variable length aggregate or array of | |||
1700 | variable length aggregates, assume worst that the end is | |||
1701 | just BITS_PER_UNIT aligned. */ | |||
1702 | if (TREE_CODE (TYPE_SIZE (type))((enum tree_code) (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1702, __FUNCTION__))->type_common.size))->base.code) == INTEGER_CST) | |||
1703 | { | |||
1704 | if (TREE_INT_CST_LOW (TYPE_SIZE (type))((unsigned long) (*tree_int_cst_elt_check ((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1704, __FUNCTION__))->type_common.size)), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1704, __FUNCTION__)))) | |||
1705 | { | |||
1706 | unsigned HOST_WIDE_INTlong sz | |||
1707 | = least_bit_hwi (TREE_INT_CST_LOW (TYPE_SIZE (type))((unsigned long) (*tree_int_cst_elt_check ((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1707, __FUNCTION__))->type_common.size)), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1707, __FUNCTION__)))); | |||
1708 | rli->offset_align = MIN (rli->offset_align, sz)((rli->offset_align) < (sz) ? (rli->offset_align) : ( sz)); | |||
1709 | } | |||
1710 | } | |||
1711 | else | |||
1712 | rli->offset_align = MIN (rli->offset_align, BITS_PER_UNIT)((rli->offset_align) < ((8)) ? (rli->offset_align) : ((8))); | |||
1713 | } | |||
1714 | } | |||
1715 | else if (targetm.ms_bitfield_layout_p (rli->t)) | |||
1716 | { | |||
1717 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, ( (contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1717, __FUNCTION__))->decl_common.size)); | |||
1718 | ||||
1719 | /* If FIELD is the last field and doesn't end at the full length | |||
1720 | of the type then pad the struct out to the full length of the | |||
1721 | last type. */ | |||
1722 | if (DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1722, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type ) | |||
1723 | && !integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1723, __FUNCTION__))->decl_common.size))) | |||
1724 | { | |||
1725 | /* We have to scan, because non-field DECLS are also here. */ | |||
1726 | tree probe = field; | |||
1727 | while ((probe = DECL_CHAIN (probe)(((contains_struct_check (((contains_struct_check ((probe), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1727, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1727, __FUNCTION__))->common.chain)))) | |||
1728 | if (TREE_CODE (probe)((enum tree_code) (probe)->base.code) == FIELD_DECL) | |||
1729 | break; | |||
1730 | if (!probe) | |||
1731 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, size_int_kind (rli->remaining_in_alignment, stk_bitsizetype)) | |||
1732 | bitsize_int (rli->remaining_in_alignment))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, size_int_kind (rli->remaining_in_alignment, stk_bitsizetype)); | |||
1733 | } | |||
1734 | ||||
1735 | normalize_rli (rli); | |||
1736 | } | |||
1737 | else | |||
1738 | { | |||
1739 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field))size_binop_loc (((location_t) 0), PLUS_EXPR, rli->bitpos, ( (contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1739, __FUNCTION__))->decl_common.size)); | |||
1740 | normalize_rli (rli); | |||
1741 | } | |||
1742 | } | |||
1743 | ||||
1744 | /* Assuming that all the fields have been laid out, this function uses | |||
1745 | RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type | |||
1746 | indicated by RLI. */ | |||
1747 | ||||
1748 | static void | |||
1749 | finalize_record_size (record_layout_info rli) | |||
1750 | { | |||
1751 | tree unpadded_size, unpadded_size_unit; | |||
1752 | ||||
1753 | /* Now we want just byte and bit offsets, so set the offset alignment | |||
1754 | to be a byte and then normalize. */ | |||
1755 | rli->offset_align = BITS_PER_UNIT(8); | |||
1756 | normalize_rli (rli); | |||
1757 | ||||
1758 | /* Determine the desired alignment. */ | |||
1759 | #ifdef ROUND_TYPE_ALIGN | |||
1760 | SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1761, __FUNCTION__))->type_common.align = ffs_hwi (ROUND_TYPE_ALIGN (rli->t, (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1760, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1760, __FUNCTION__))->type_common.align) - 1) : 0), rli-> record_align))) | |||
1761 | rli->record_align))((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1761, __FUNCTION__))->type_common.align = ffs_hwi (ROUND_TYPE_ALIGN (rli->t, (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1760, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1760, __FUNCTION__))->type_common.align) - 1) : 0), rli-> record_align))); | |||
1762 | #else | |||
1763 | SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align))((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1763, __FUNCTION__))->type_common.align = ffs_hwi (((((( tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1763, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1763, __FUNCTION__))->type_common.align) - 1) : 0)) > (rli->record_align) ? ((((tree_class_check ((rli->t), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1763, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1763, __FUNCTION__))->type_common.align) - 1) : 0)) : (rli ->record_align)))); | |||
1764 | #endif | |||
1765 | ||||
1766 | /* Compute the size so far. Be sure to allow for extra bits in the | |||
1767 | size in bytes. We have guaranteed above that it will be no more | |||
1768 | than a single byte. */ | |||
1769 | unpadded_size = rli_size_so_far (rli); | |||
1770 | unpadded_size_unit = rli_size_unit_so_far (rli); | |||
1771 | if (! integer_zerop (rli->bitpos)) | |||
1772 | unpadded_size_unit | |||
1773 | = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node)size_binop_loc (((location_t) 0), PLUS_EXPR, unpadded_size_unit , global_trees[TI_SIZE_ONE]); | |||
1774 | ||||
1775 | /* Round the size up to be a multiple of the required alignment. */ | |||
1776 | TYPE_SIZE (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1776, __FUNCTION__))->type_common.size) = round_up (unpadded_size, TYPE_ALIGN (rli->t))round_up_loc (((location_t) 0), unpadded_size, (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1776, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1776, __FUNCTION__))->type_common.align) - 1) : 0)); | |||
1777 | TYPE_SIZE_UNIT (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1777, __FUNCTION__))->type_common.size_unit) | |||
1778 | = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t))round_up_loc (((location_t) 0), unpadded_size_unit, ((((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1778, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1778, __FUNCTION__))->type_common.align) - 1) : 0) / (8) )); | |||
1779 | ||||
1780 | if (TREE_CONSTANT (unpadded_size)((non_type_check ((unpadded_size), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1780, __FUNCTION__))->base.constant_flag) | |||
1781 | && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1781, __FUNCTION__))->type_common.size)) == 0 | |||
1782 | && input_location != BUILTINS_LOCATION((location_t) 1) | |||
1783 | && !TYPE_ARTIFICIAL (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1783, __FUNCTION__))->base.nowarning_flag)) | |||
1784 | { | |||
1785 | tree pad_size | |||
1786 | = size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (rli->t), unpadded_size_unit)size_binop_loc (((location_t) 0), MINUS_EXPR, ((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1786, __FUNCTION__))->type_common.size_unit), unpadded_size_unit ); | |||
1787 | warning (OPT_Wpadded, | |||
1788 | "padding struct size to alignment boundary with %E bytes", pad_size); | |||
1789 | } | |||
1790 | ||||
1791 | if (warn_packedglobal_options.x_warn_packed && TREE_CODE (rli->t)((enum tree_code) (rli->t)->base.code) == RECORD_TYPE | |||
1792 | && TYPE_PACKED (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1792, __FUNCTION__))->base.u.bits.packed_flag) && ! rli->packed_maybe_necessary | |||
1793 | && TREE_CONSTANT (unpadded_size)((non_type_check ((unpadded_size), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1793, __FUNCTION__))->base.constant_flag)) | |||
1794 | { | |||
1795 | tree unpacked_size; | |||
1796 | ||||
1797 | #ifdef ROUND_TYPE_ALIGN | |||
1798 | rli->unpacked_align | |||
1799 | = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t)(((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1799, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1799, __FUNCTION__))->type_common.align) - 1) : 0), rli->unpacked_align); | |||
1800 | #else | |||
1801 | rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align)(((((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1801, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1801, __FUNCTION__))->type_common.align) - 1) : 0)) > (rli->unpacked_align) ? ((((tree_class_check ((rli->t) , (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1801, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1801, __FUNCTION__))->type_common.align) - 1) : 0)) : (rli ->unpacked_align)); | |||
1802 | #endif | |||
1803 | ||||
1804 | unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align)round_up_loc (((location_t) 0), ((tree_class_check ((rli-> t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1804, __FUNCTION__))->type_common.size), rli->unpacked_align ); | |||
1805 | if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1805, __FUNCTION__))->type_common.size))) | |||
1806 | { | |||
1807 | if (TYPE_NAME (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1807, __FUNCTION__))->type_common.name)) | |||
1808 | { | |||
1809 | tree name; | |||
1810 | ||||
1811 | if (TREE_CODE (TYPE_NAME (rli->t))((enum tree_code) (((tree_class_check ((rli->t), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1811, __FUNCTION__))->type_common.name))->base.code) == IDENTIFIER_NODE) | |||
1812 | name = TYPE_NAME (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1812, __FUNCTION__))->type_common.name); | |||
1813 | else | |||
1814 | name = DECL_NAME (TYPE_NAME (rli->t))((contains_struct_check ((((tree_class_check ((rli->t), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1814, __FUNCTION__))->type_common.name)), (TS_DECL_MINIMAL ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1814, __FUNCTION__))->decl_minimal.name); | |||
1815 | ||||
1816 | if (STRICT_ALIGNMENT0) | |||
1817 | warning (OPT_Wpacked, "packed attribute causes inefficient " | |||
1818 | "alignment for %qE", name); | |||
1819 | else | |||
1820 | warning (OPT_Wpacked, | |||
1821 | "packed attribute is unnecessary for %qE", name); | |||
1822 | } | |||
1823 | else | |||
1824 | { | |||
1825 | if (STRICT_ALIGNMENT0) | |||
1826 | warning (OPT_Wpacked, | |||
1827 | "packed attribute causes inefficient alignment"); | |||
1828 | else | |||
1829 | warning (OPT_Wpacked, "packed attribute is unnecessary"); | |||
1830 | } | |||
1831 | } | |||
1832 | } | |||
1833 | } | |||
1834 | ||||
1835 | /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */ | |||
1836 | ||||
1837 | void | |||
1838 | compute_record_mode (tree type) | |||
1839 | { | |||
1840 | tree field; | |||
1841 | machine_mode mode = VOIDmode((void) 0, E_VOIDmode); | |||
1842 | ||||
1843 | /* Most RECORD_TYPEs have BLKmode, so we start off assuming that. | |||
1844 | However, if possible, we use a mode that fits in a register | |||
1845 | instead, in order to allow for better optimization down the | |||
1846 | line. */ | |||
1847 | SET_TYPE_MODE (type, BLKmode)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1847, __FUNCTION__))->type_common.mode = (((void) 0, E_BLKmode ))); | |||
1848 | ||||
1849 | poly_uint64 type_size; | |||
1850 | if (!poly_int_tree_p (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1850, __FUNCTION__))->type_common.size), &type_size)) | |||
1851 | return; | |||
1852 | ||||
1853 | /* A record which has any BLKmode members must itself be | |||
1854 | BLKmode; it can't go in a register. Unless the member is | |||
1855 | BLKmode only because it isn't aligned. */ | |||
1856 | for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1856, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1856, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1856, __FUNCTION__))->common.chain))) | |||
1857 | { | |||
1858 | if (TREE_CODE (field)((enum tree_code) (field)->base.code) != FIELD_DECL) | |||
1859 | continue; | |||
1860 | ||||
1861 | poly_uint64 field_size; | |||
1862 | if (TREE_CODE (TREE_TYPE (field))((enum tree_code) (((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1862, __FUNCTION__))->typed.type))->base.code) == ERROR_MARK | |||
1863 | || (TYPE_MODE (TREE_TYPE (field))((((enum tree_code) ((tree_class_check ((((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1863, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1863, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1863, __FUNCTION__))->typed.type)) : (((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1863, __FUNCTION__))->typed.type))->type_common.mode) == BLKmode((void) 0, E_BLKmode) | |||
1864 | && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))((tree_class_check ((((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1864, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1864, __FUNCTION__))->type_common.no_force_blk_flag) | |||
1865 | && !(TYPE_SIZE (TREE_TYPE (field))((tree_class_check ((((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1865, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1865, __FUNCTION__))->type_common.size) != 0 | |||
1866 | && integer_zerop (TYPE_SIZE (TREE_TYPE (field))((tree_class_check ((((contains_struct_check ((field), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1866, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1866, __FUNCTION__))->type_common.size)))) | |||
1867 | || !tree_fits_poly_uint64_p (bit_position (field)) | |||
1868 | || DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1868, __FUNCTION__))->decl_common.size) == 0 | |||
1869 | || !poly_int_tree_p (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1869, __FUNCTION__))->decl_common.size), &field_size)) | |||
1870 | return; | |||
1871 | ||||
1872 | /* If this field is the whole struct, remember its mode so | |||
1873 | that, say, we can put a double in a class into a DF | |||
1874 | register instead of forcing it to live in the stack. */ | |||
1875 | if (known_eq (field_size, type_size)(!maybe_ne (field_size, type_size)) | |||
1876 | /* Partial int types (e.g. __int20) may have TYPE_SIZE equal to | |||
1877 | wider types (e.g. int32), despite precision being less. Ensure | |||
1878 | that the TYPE_MODE of the struct does not get set to the partial | |||
1879 | int mode if there is a wider type also in the struct. */ | |||
1880 | && known_gt (GET_MODE_PRECISION (DECL_MODE (field)),(!maybe_le (GET_MODE_PRECISION (((contains_struct_check ((field ), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1880, __FUNCTION__))->decl_common.mode)), GET_MODE_PRECISION (mode))) | |||
1881 | GET_MODE_PRECISION (mode))(!maybe_le (GET_MODE_PRECISION (((contains_struct_check ((field ), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1880, __FUNCTION__))->decl_common.mode)), GET_MODE_PRECISION (mode)))) | |||
1882 | mode = DECL_MODE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1882, __FUNCTION__))->decl_common.mode); | |||
1883 | ||||
1884 | /* With some targets, it is sub-optimal to access an aligned | |||
1885 | BLKmode structure as a scalar. */ | |||
1886 | if (targetm.member_type_forces_blk (field, mode)) | |||
1887 | return; | |||
1888 | } | |||
1889 | ||||
1890 | /* If we only have one real field; use its mode if that mode's size | |||
1891 | matches the type's size. This generally only applies to RECORD_TYPE. | |||
1892 | For UNION_TYPE, if the widest field is MODE_INT then use that mode. | |||
1893 | If the widest field is MODE_PARTIAL_INT, and the union will be passed | |||
1894 | by reference, then use that mode. */ | |||
1895 | if ((TREE_CODE (type)((enum tree_code) (type)->base.code) == RECORD_TYPE | |||
1896 | || (TREE_CODE (type)((enum tree_code) (type)->base.code) == UNION_TYPE | |||
1897 | && (GET_MODE_CLASS (mode)((enum mode_class) mode_class[mode]) == MODE_INT | |||
1898 | || (GET_MODE_CLASS (mode)((enum mode_class) mode_class[mode]) == MODE_PARTIAL_INT | |||
1899 | && (targetm.calls.pass_by_reference | |||
1900 | (pack_cumulative_args (0), | |||
1901 | function_arg_info (type, mode, /*named=*/false))))))) | |||
1902 | && mode != VOIDmode((void) 0, E_VOIDmode) | |||
1903 | && known_eq (GET_MODE_BITSIZE (mode), type_size)(!maybe_ne (GET_MODE_BITSIZE (mode), type_size))) | |||
1904 | ; | |||
1905 | else | |||
1906 | mode = mode_for_size_tree (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1906, __FUNCTION__))->type_common.size), MODE_INT, 1).else_blk (); | |||
1907 | ||||
1908 | /* If structure's known alignment is less than what the scalar | |||
1909 | mode would need, and it matters, then stick with BLKmode. */ | |||
1910 | if (mode != BLKmode((void) 0, E_BLKmode) | |||
1911 | && STRICT_ALIGNMENT0 | |||
1912 | && ! (TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1912, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1912, __FUNCTION__))->type_common.align) - 1) : 0) >= BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0 ) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128))) | |||
1913 | || TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1913, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1913, __FUNCTION__))->type_common.align) - 1) : 0) >= GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode))) | |||
1914 | { | |||
1915 | /* If this is the only reason this type is BLKmode, then | |||
1916 | don't force containing types to be BLKmode. */ | |||
1917 | TYPE_NO_FORCE_BLK (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1917, __FUNCTION__))->type_common.no_force_blk_flag) = 1; | |||
1918 | mode = BLKmode((void) 0, E_BLKmode); | |||
1919 | } | |||
1920 | ||||
1921 | SET_TYPE_MODE (type, mode)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1921, __FUNCTION__))->type_common.mode = (mode)); | |||
1922 | } | |||
1923 | ||||
1924 | /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid | |||
1925 | out. */ | |||
1926 | ||||
1927 | static void | |||
1928 | finalize_type_size (tree type) | |||
1929 | { | |||
1930 | /* Normally, use the alignment corresponding to the mode chosen. | |||
1931 | However, where strict alignment is not required, avoid | |||
1932 | over-aligning structures, since most compilers do not do this | |||
1933 | alignment. */ | |||
1934 | bool tua_cleared_p = false; | |||
1935 | if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1935, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) != BLKmode((void) 0, E_BLKmode) | |||
1936 | && TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1936, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) != VOIDmode((void) 0, E_VOIDmode) | |||
1937 | && (STRICT_ALIGNMENT0 || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)))) | |||
1938 | { | |||
1939 | unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type))get_mode_alignment (((((enum tree_code) ((tree_class_check (( type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1939, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode)); | |||
1940 | ||||
1941 | /* Don't override a larger alignment requirement coming from a user | |||
1942 | alignment of one of the fields. */ | |||
1943 | if (mode_align >= TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1943, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1943, __FUNCTION__))->type_common.align) - 1) : 0)) | |||
1944 | { | |||
1945 | SET_TYPE_ALIGN (type, mode_align)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1945, __FUNCTION__))->type_common.align = ffs_hwi (mode_align )); | |||
1946 | /* Remember that we're about to reset this flag. */ | |||
1947 | tua_cleared_p = TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1947, __FUNCTION__))->base.u.bits.user_align); | |||
1948 | TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1948, __FUNCTION__))->base.u.bits.user_align) = false; | |||
1949 | } | |||
1950 | } | |||
1951 | ||||
1952 | /* Do machine-dependent extra alignment. */ | |||
1953 | #ifdef ROUND_TYPE_ALIGN | |||
1954 | SET_TYPE_ALIGN (type,((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1955, __FUNCTION__))->type_common.align = ffs_hwi (ROUND_TYPE_ALIGN (type, (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1955, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1955, __FUNCTION__))->type_common.align) - 1) : 0), (8)) )) | |||
1955 | ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1955, __FUNCTION__))->type_common.align = ffs_hwi (ROUND_TYPE_ALIGN (type, (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1955, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1955, __FUNCTION__))->type_common.align) - 1) : 0), (8)) )); | |||
1956 | #endif | |||
1957 | ||||
1958 | /* If we failed to find a simple way to calculate the unit size | |||
1959 | of the type, find it by division. */ | |||
1960 | if (TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1960, __FUNCTION__))->type_common.size_unit) == 0 && TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1960, __FUNCTION__))->type_common.size) != 0) | |||
1961 | /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the | |||
1962 | result will fit in sizetype. We will get more efficient code using | |||
1963 | sizetype, so we force a conversion. */ | |||
1964 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1964, __FUNCTION__))->type_common.size_unit) | |||
1965 | = fold_convert (sizetype,fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1966, __FUNCTION__))->type_common.size), global_trees[TI_BITSIZE_UNIT ])) | |||
1966 | size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1966, __FUNCTION__))->type_common.size), global_trees[TI_BITSIZE_UNIT ])) | |||
1967 | bitsize_unit_node))fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), FLOOR_DIV_EXPR, ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1966, __FUNCTION__))->type_common.size), global_trees[TI_BITSIZE_UNIT ])); | |||
1968 | ||||
1969 | if (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1969, __FUNCTION__))->type_common.size) != 0) | |||
1970 | { | |||
1971 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1971, __FUNCTION__))->type_common.size) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type))round_up_loc (((location_t) 0), ((tree_class_check ((type), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1971, __FUNCTION__))->type_common.size), (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1971, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1971, __FUNCTION__))->type_common.align) - 1) : 0)); | |||
1972 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1972, __FUNCTION__))->type_common.size_unit) | |||
1973 | = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type))round_up_loc (((location_t) 0), ((tree_class_check ((type), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1973, __FUNCTION__))->type_common.size_unit), ((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1973, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1973, __FUNCTION__))->type_common.align) - 1) : 0) / (8) )); | |||
1974 | } | |||
1975 | ||||
1976 | /* Evaluate nonconstant sizes only once, either now or as soon as safe. */ | |||
1977 | if (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1977, __FUNCTION__))->type_common.size) != 0 && TREE_CODE (TYPE_SIZE (type))((enum tree_code) (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1977, __FUNCTION__))->type_common.size))->base.code) != INTEGER_CST) | |||
1978 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1978, __FUNCTION__))->type_common.size) = variable_size (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1978, __FUNCTION__))->type_common.size)); | |||
1979 | if (TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1979, __FUNCTION__))->type_common.size_unit) != 0 | |||
1980 | && TREE_CODE (TYPE_SIZE_UNIT (type))((enum tree_code) (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1980, __FUNCTION__))->type_common.size_unit))->base.code ) != INTEGER_CST) | |||
1981 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1981, __FUNCTION__))->type_common.size_unit) = variable_size (TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1981, __FUNCTION__))->type_common.size_unit)); | |||
1982 | ||||
1983 | /* Handle empty records as per the x86-64 psABI. */ | |||
1984 | TYPE_EMPTY_P (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1984, __FUNCTION__))->type_common.empty_flag) = targetm.calls.empty_record_p (type); | |||
1985 | ||||
1986 | /* Also layout any other variants of the type. */ | |||
1987 | if (TYPE_NEXT_VARIANT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1987, __FUNCTION__))->type_common.next_variant) | |||
1988 | || type != TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1988, __FUNCTION__))->type_common.main_variant)) | |||
1989 | { | |||
1990 | tree variant; | |||
1991 | /* Record layout info of this variant. */ | |||
1992 | tree size = TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1992, __FUNCTION__))->type_common.size); | |||
1993 | tree size_unit = TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1993, __FUNCTION__))->type_common.size_unit); | |||
1994 | unsigned int align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1994, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1994, __FUNCTION__))->type_common.align) - 1) : 0); | |||
1995 | unsigned int precision = TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1995, __FUNCTION__))->type_common.precision); | |||
1996 | unsigned int user_align = TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1996, __FUNCTION__))->base.u.bits.user_align); | |||
1997 | machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1997, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode); | |||
1998 | bool empty_p = TYPE_EMPTY_P (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1998, __FUNCTION__))->type_common.empty_flag); | |||
1999 | bool typeless = AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)) && TYPE_TYPELESS_STORAGE (type)((tree_check4 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 1999, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage); | |||
2000 | ||||
2001 | /* Copy it into all variants. */ | |||
2002 | for (variant = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2002, __FUNCTION__))->type_common.main_variant); | |||
2003 | variant != NULL_TREE(tree) nullptr; | |||
2004 | variant = TYPE_NEXT_VARIANT (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2004, __FUNCTION__))->type_common.next_variant)) | |||
2005 | { | |||
2006 | TYPE_SIZE (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2006, __FUNCTION__))->type_common.size) = size; | |||
2007 | TYPE_SIZE_UNIT (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2007, __FUNCTION__))->type_common.size_unit) = size_unit; | |||
2008 | unsigned valign = align; | |||
2009 | if (TYPE_USER_ALIGN (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2009, __FUNCTION__))->base.u.bits.user_align)) | |||
2010 | { | |||
2011 | valign = MAX (valign, TYPE_ALIGN (variant))((valign) > ((((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2011, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2011, __FUNCTION__))->type_common.align) - 1) : 0)) ? (valign ) : ((((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2011, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2011, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
2012 | /* If we reset TYPE_USER_ALIGN on the main variant, we might | |||
2013 | need to reset it on the variants too. TYPE_MODE will be set | |||
2014 | to MODE in this variant, so we can use that. */ | |||
2015 | if (tua_cleared_p && GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) >= valign) | |||
2016 | TYPE_USER_ALIGN (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2016, __FUNCTION__))->base.u.bits.user_align) = false; | |||
2017 | } | |||
2018 | else | |||
2019 | TYPE_USER_ALIGN (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2019, __FUNCTION__))->base.u.bits.user_align) = user_align; | |||
2020 | SET_TYPE_ALIGN (variant, valign)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2020, __FUNCTION__))->type_common.align = ffs_hwi (valign )); | |||
2021 | TYPE_PRECISION (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2021, __FUNCTION__))->type_common.precision) = precision; | |||
2022 | SET_TYPE_MODE (variant, mode)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2022, __FUNCTION__))->type_common.mode = (mode)); | |||
2023 | TYPE_EMPTY_P (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2023, __FUNCTION__))->type_common.empty_flag) = empty_p; | |||
2024 | if (AGGREGATE_TYPE_P (variant)(((enum tree_code) (variant)->base.code) == ARRAY_TYPE || ( ((enum tree_code) (variant)->base.code) == RECORD_TYPE || ( (enum tree_code) (variant)->base.code) == UNION_TYPE || (( enum tree_code) (variant)->base.code) == QUAL_UNION_TYPE))) | |||
2025 | TYPE_TYPELESS_STORAGE (variant)((tree_check4 ((variant), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2025, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage) = typeless; | |||
2026 | } | |||
2027 | } | |||
2028 | } | |||
2029 | ||||
2030 | /* Return a new underlying object for a bitfield started with FIELD. */ | |||
2031 | ||||
2032 | static tree | |||
2033 | start_bitfield_representative (tree field) | |||
2034 | { | |||
2035 | tree repr = make_node (FIELD_DECL); | |||
2036 | DECL_FIELD_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2036, __FUNCTION__, (FIELD_DECL)))->field_decl.offset) = DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2036, __FUNCTION__, (FIELD_DECL)))->field_decl.offset); | |||
2037 | /* Force the representative to begin at a BITS_PER_UNIT aligned | |||
2038 | boundary - C++ may use tail-padding of a base object to | |||
2039 | continue packing bits so the bitfield region does not start | |||
2040 | at bit zero (see g++.dg/abi/bitfield5.C for example). | |||
2041 | Unallocated bits may happen for other reasons as well, | |||
2042 | for example Ada which allows explicit bit-granular structure layout. */ | |||
2043 | DECL_FIELD_BIT_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2043, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ) | |||
2044 | = size_binop (BIT_AND_EXPR,size_binop_loc (((location_t) 0), BIT_AND_EXPR, ((tree_check ( (field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2045, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ), size_int_kind (~((8) - 1), stk_bitsizetype)) | |||
2045 | DECL_FIELD_BIT_OFFSET (field),size_binop_loc (((location_t) 0), BIT_AND_EXPR, ((tree_check ( (field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2045, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ), size_int_kind (~((8) - 1), stk_bitsizetype)) | |||
2046 | bitsize_int (~(BITS_PER_UNIT - 1)))size_binop_loc (((location_t) 0), BIT_AND_EXPR, ((tree_check ( (field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2045, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ), size_int_kind (~((8) - 1), stk_bitsizetype)); | |||
2047 | SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field))((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2047, __FUNCTION__, (FIELD_DECL)))->decl_common.off_align = ffs_hwi ((((unsigned long)1) << (tree_check ((field) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2047, __FUNCTION__, (FIELD_DECL)))->decl_common.off_align )) - 1); | |||
2048 | DECL_SIZE (repr)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2048, __FUNCTION__))->decl_common.size) = DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2048, __FUNCTION__))->decl_common.size); | |||
2049 | DECL_SIZE_UNIT (repr)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2049, __FUNCTION__))->decl_common.size_unit) = DECL_SIZE_UNIT (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2049, __FUNCTION__))->decl_common.size_unit); | |||
2050 | DECL_PACKED (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2050, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag ) = DECL_PACKED (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2050, __FUNCTION__, (FIELD_DECL)))->base.u.bits.packed_flag ); | |||
2051 | DECL_CONTEXT (repr)((contains_struct_check ((repr), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2051, __FUNCTION__))->decl_minimal.context) = DECL_CONTEXT (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2051, __FUNCTION__))->decl_minimal.context); | |||
2052 | /* There are no indirect accesses to this field. If we introduce | |||
2053 | some then they have to use the record alias set. This makes | |||
2054 | sure to properly conflict with [indirect] accesses to addressable | |||
2055 | fields of the bitfield group. */ | |||
2056 | DECL_NONADDRESSABLE_P (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2056, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_2 ) = 1; | |||
2057 | return repr; | |||
2058 | } | |||
2059 | ||||
2060 | /* Finish up a bitfield group that was started by creating the underlying | |||
2061 | object REPR with the last field in the bitfield group FIELD. */ | |||
2062 | ||||
2063 | static void | |||
2064 | finish_bitfield_representative (tree repr, tree field) | |||
2065 | { | |||
2066 | unsigned HOST_WIDE_INTlong bitsize, maxbitsize; | |||
2067 | tree nextf, size; | |||
2068 | ||||
2069 | size = size_diffop (DECL_FIELD_OFFSET (field),size_diffop_loc (((location_t) 0), ((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2069, __FUNCTION__, (FIELD_DECL)))->field_decl.offset), ( (tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2070, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)) | |||
2070 | DECL_FIELD_OFFSET (repr))size_diffop_loc (((location_t) 0), ((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2069, __FUNCTION__, (FIELD_DECL)))->field_decl.offset), ( (tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2070, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)); | |||
2071 | while (TREE_CODE (size)((enum tree_code) (size)->base.code) == COMPOUND_EXPR) | |||
2072 | size = TREE_OPERAND (size, 1)(*((const_cast<tree*> (tree_operand_check ((size), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2072, __FUNCTION__))))); | |||
2073 | gcc_assert (tree_fits_uhwi_p (size))((void)(!(tree_fits_uhwi_p (size)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2073, __FUNCTION__), 0 : 0)); | |||
2074 | bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT(8) | |||
2075 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2075, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset )) | |||
2076 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2076, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset )) | |||
2077 | + tree_to_uhwi (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2077, __FUNCTION__))->decl_common.size))); | |||
2078 | ||||
2079 | /* Round up bitsize to multiples of BITS_PER_UNIT. */ | |||
2080 | bitsize = (bitsize + BITS_PER_UNIT(8) - 1) & ~(BITS_PER_UNIT(8) - 1); | |||
2081 | ||||
2082 | /* Now nothing tells us how to pad out bitsize ... */ | |||
2083 | if (TREE_CODE (DECL_CONTEXT (field))((enum tree_code) (((contains_struct_check ((field), (TS_DECL_MINIMAL ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2083, __FUNCTION__))->decl_minimal.context))->base.code ) == RECORD_TYPE) | |||
2084 | { | |||
2085 | nextf = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2085, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2085, __FUNCTION__))->common.chain)); | |||
2086 | while (nextf && TREE_CODE (nextf)((enum tree_code) (nextf)->base.code) != FIELD_DECL) | |||
2087 | nextf = DECL_CHAIN (nextf)(((contains_struct_check (((contains_struct_check ((nextf), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2087, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2087, __FUNCTION__))->common.chain)); | |||
2088 | } | |||
2089 | else | |||
2090 | nextf = NULL_TREE(tree) nullptr; | |||
2091 | if (nextf) | |||
2092 | { | |||
2093 | tree maxsize; | |||
2094 | /* If there was an error, the field may be not laid out | |||
2095 | correctly. Don't bother to do anything. */ | |||
2096 | if (TREE_TYPE (nextf)((contains_struct_check ((nextf), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2096, __FUNCTION__))->typed.type) == error_mark_nodeglobal_trees[TI_ERROR_MARK]) | |||
2097 | { | |||
2098 | TREE_TYPE (repr)((contains_struct_check ((repr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2098, __FUNCTION__))->typed.type) = error_mark_nodeglobal_trees[TI_ERROR_MARK]; | |||
2099 | return; | |||
2100 | } | |||
2101 | maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),size_diffop_loc (((location_t) 0), ((tree_check ((nextf), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2101, __FUNCTION__, (FIELD_DECL)))->field_decl.offset), ( (tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2102, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)) | |||
2102 | DECL_FIELD_OFFSET (repr))size_diffop_loc (((location_t) 0), ((tree_check ((nextf), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2101, __FUNCTION__, (FIELD_DECL)))->field_decl.offset), ( (tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2102, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)); | |||
2103 | if (tree_fits_uhwi_p (maxsize)) | |||
2104 | { | |||
2105 | maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT(8) | |||
2106 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf)((tree_check ((nextf), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2106, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset )) | |||
2107 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2107, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ))); | |||
2108 | /* If the group ends within a bitfield nextf does not need to be | |||
2109 | aligned to BITS_PER_UNIT. Thus round up. */ | |||
2110 | maxbitsize = (maxbitsize + BITS_PER_UNIT(8) - 1) & ~(BITS_PER_UNIT(8) - 1); | |||
2111 | } | |||
2112 | else | |||
2113 | maxbitsize = bitsize; | |||
2114 | } | |||
2115 | else | |||
2116 | { | |||
2117 | /* Note that if the C++ FE sets up tail-padding to be re-used it | |||
2118 | creates a as-base variant of the type with TYPE_SIZE adjusted | |||
2119 | accordingly. So it is safe to include tail-padding here. */ | |||
2120 | tree aggsize = lang_hooks.types.unit_size_without_reusable_padding | |||
2121 | (DECL_CONTEXT (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2121, __FUNCTION__))->decl_minimal.context)); | |||
2122 | tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr))size_diffop_loc (((location_t) 0), aggsize, ((tree_check ((repr ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2122, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)); | |||
2123 | /* We cannot generally rely on maxsize to fold to an integer constant, | |||
2124 | so use bitsize as fallback for this case. */ | |||
2125 | if (tree_fits_uhwi_p (maxsize)) | |||
2126 | maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT(8) | |||
2127 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2127, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset ))); | |||
2128 | else | |||
2129 | maxbitsize = bitsize; | |||
2130 | } | |||
2131 | ||||
2132 | /* Only if we don't artificially break up the representative in | |||
2133 | the middle of a large bitfield with different possibly | |||
2134 | overlapping representatives. And all representatives start | |||
2135 | at byte offset. */ | |||
2136 | gcc_assert (maxbitsize % BITS_PER_UNIT == 0)((void)(!(maxbitsize % (8) == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2136, __FUNCTION__), 0 : 0)); | |||
2137 | ||||
2138 | /* Find the smallest nice mode to use. */ | |||
2139 | opt_scalar_int_mode mode_iter; | |||
2140 | FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)for (mode_iterator::start (&(mode_iter), MODE_INT); mode_iterator ::iterate_p (&(mode_iter)); mode_iterator::get_next (& (mode_iter))) | |||
2141 | if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize) | |||
2142 | break; | |||
2143 | ||||
2144 | scalar_int_mode mode; | |||
2145 | if (!mode_iter.exists (&mode) | |||
2146 | || GET_MODE_BITSIZE (mode) > maxbitsize | |||
2147 | || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZEGET_MODE_BITSIZE (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode::from_int ) E_TImode)) : (scalar_int_mode ((scalar_int_mode::from_int) E_DImode )))) | |||
2148 | { | |||
2149 | /* We really want a BLKmode representative only as a last resort, | |||
2150 | considering the member b in | |||
2151 | struct { int a : 7; int b : 17; int c; } __attribute__((packed)); | |||
2152 | Otherwise we simply want to split the representative up | |||
2153 | allowing for overlaps within the bitfield region as required for | |||
2154 | struct { int a : 7; int b : 7; | |||
2155 | int c : 10; int d; } __attribute__((packed)); | |||
2156 | [0, 15] HImode for a and b, [8, 23] HImode for c. */ | |||
2157 | DECL_SIZE (repr)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2157, __FUNCTION__))->decl_common.size) = bitsize_int (bitsize)size_int_kind (bitsize, stk_bitsizetype); | |||
2158 | DECL_SIZE_UNIT (repr)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2158, __FUNCTION__))->decl_common.size_unit) = size_int (bitsize / BITS_PER_UNIT)size_int_kind (bitsize / (8), stk_sizetype); | |||
2159 | SET_DECL_MODE (repr, BLKmode)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2159, __FUNCTION__))->decl_common.mode = (((void) 0, E_BLKmode ))); | |||
2160 | TREE_TYPE (repr)((contains_struct_check ((repr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2160, __FUNCTION__))->typed.type) = build_array_type_nelts (unsigned_char_type_nodeinteger_types[itk_unsigned_char], | |||
2161 | bitsize / BITS_PER_UNIT(8)); | |||
2162 | } | |||
2163 | else | |||
2164 | { | |||
2165 | unsigned HOST_WIDE_INTlong modesize = GET_MODE_BITSIZE (mode); | |||
2166 | DECL_SIZE (repr)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2166, __FUNCTION__))->decl_common.size) = bitsize_int (modesize)size_int_kind (modesize, stk_bitsizetype); | |||
2167 | DECL_SIZE_UNIT (repr)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2167, __FUNCTION__))->decl_common.size_unit) = size_int (modesize / BITS_PER_UNIT)size_int_kind (modesize / (8), stk_sizetype); | |||
2168 | SET_DECL_MODE (repr, mode)((contains_struct_check ((repr), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2168, __FUNCTION__))->decl_common.mode = (mode)); | |||
2169 | TREE_TYPE (repr)((contains_struct_check ((repr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2169, __FUNCTION__))->typed.type) = lang_hooks.types.type_for_mode (mode, 1); | |||
2170 | } | |||
2171 | ||||
2172 | /* Remember whether the bitfield group is at the end of the | |||
2173 | structure or not. */ | |||
2174 | DECL_CHAIN (repr)(((contains_struct_check (((contains_struct_check ((repr), (TS_DECL_MINIMAL ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2174, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2174, __FUNCTION__))->common.chain)) = nextf; | |||
2175 | } | |||
2176 | ||||
2177 | /* Compute and set FIELD_DECLs for the underlying objects we should | |||
2178 | use for bitfield access for the structure T. */ | |||
2179 | ||||
2180 | void | |||
2181 | finish_bitfield_layout (tree t) | |||
2182 | { | |||
2183 | tree field, prev; | |||
2184 | tree repr = NULL_TREE(tree) nullptr; | |||
2185 | ||||
2186 | if (TREE_CODE (t)((enum tree_code) (t)->base.code) == QUAL_UNION_TYPE) | |||
2187 | return; | |||
2188 | ||||
2189 | for (prev = NULL_TREE(tree) nullptr, field = TYPE_FIELDS (t)((tree_check3 ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2189, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); | |||
2190 | field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2190, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2190, __FUNCTION__))->common.chain))) | |||
2191 | { | |||
2192 | if (TREE_CODE (field)((enum tree_code) (field)->base.code) != FIELD_DECL) | |||
2193 | continue; | |||
2194 | ||||
2195 | /* In the C++ memory model, consecutive bit fields in a structure are | |||
2196 | considered one memory location and updating a memory location | |||
2197 | may not store into adjacent memory locations. */ | |||
2198 | if (!repr | |||
2199 | && DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2199, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type )) | |||
2200 | { | |||
2201 | /* Start new representative. */ | |||
2202 | repr = start_bitfield_representative (field); | |||
2203 | } | |||
2204 | else if (repr | |||
2205 | && ! DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2205, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type )) | |||
2206 | { | |||
2207 | /* Finish off new representative. */ | |||
2208 | finish_bitfield_representative (repr, prev); | |||
2209 | repr = NULL_TREE(tree) nullptr; | |||
2210 | } | |||
2211 | else if (DECL_BIT_FIELD_TYPE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2211, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_field_type )) | |||
2212 | { | |||
2213 | gcc_assert (repr != NULL_TREE)((void)(!(repr != (tree) nullptr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2213, __FUNCTION__), 0 : 0)); | |||
2214 | ||||
2215 | /* Zero-size bitfields finish off a representative and | |||
2216 | do not have a representative themselves. This is | |||
2217 | required by the C++ memory model. */ | |||
2218 | if (integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2218, __FUNCTION__))->decl_common.size))) | |||
2219 | { | |||
2220 | finish_bitfield_representative (repr, prev); | |||
2221 | repr = NULL_TREE(tree) nullptr; | |||
2222 | } | |||
2223 | ||||
2224 | /* We assume that either DECL_FIELD_OFFSET of the representative | |||
2225 | and each bitfield member is a constant or they are equal. | |||
2226 | This is because we need to be able to compute the bit-offset | |||
2227 | of each field relative to the representative in get_bit_range | |||
2228 | during RTL expansion. | |||
2229 | If these constraints are not met, simply force a new | |||
2230 | representative to be generated. That will at most | |||
2231 | generate worse code but still maintain correctness with | |||
2232 | respect to the C++ memory model. */ | |||
2233 | else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2233, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)) | |||
2234 | && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2234, __FUNCTION__, (FIELD_DECL)))->field_decl.offset))) | |||
2235 | || operand_equal_p (DECL_FIELD_OFFSET (repr)((tree_check ((repr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2235, __FUNCTION__, (FIELD_DECL)))->field_decl.offset), | |||
2236 | DECL_FIELD_OFFSET (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2236, __FUNCTION__, (FIELD_DECL)))->field_decl.offset), 0))) | |||
2237 | { | |||
2238 | finish_bitfield_representative (repr, prev); | |||
2239 | repr = start_bitfield_representative (field); | |||
2240 | } | |||
2241 | } | |||
2242 | else | |||
2243 | continue; | |||
2244 | ||||
2245 | if (repr) | |||
2246 | DECL_BIT_FIELD_REPRESENTATIVE (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2246, __FUNCTION__, (FIELD_DECL)))->field_decl.qualifier ) = repr; | |||
2247 | ||||
2248 | if (TREE_CODE (t)((enum tree_code) (t)->base.code) == RECORD_TYPE) | |||
2249 | prev = field; | |||
2250 | else if (repr) | |||
2251 | { | |||
2252 | finish_bitfield_representative (repr, field); | |||
2253 | repr = NULL_TREE(tree) nullptr; | |||
2254 | } | |||
2255 | } | |||
2256 | ||||
2257 | if (repr) | |||
2258 | finish_bitfield_representative (repr, prev); | |||
2259 | } | |||
2260 | ||||
2261 | /* Do all of the work required to layout the type indicated by RLI, | |||
2262 | once the fields have been laid out. This function will call `free' | |||
2263 | for RLI, unless FREE_P is false. Passing a value other than false | |||
2264 | for FREE_P is bad practice; this option only exists to support the | |||
2265 | G++ 3.2 ABI. */ | |||
2266 | ||||
2267 | void | |||
2268 | finish_record_layout (record_layout_info rli, int free_p) | |||
2269 | { | |||
2270 | tree variant; | |||
2271 | ||||
2272 | /* Compute the final size. */ | |||
2273 | finalize_record_size (rli); | |||
2274 | ||||
2275 | /* Compute the TYPE_MODE for the record. */ | |||
2276 | compute_record_mode (rli->t); | |||
2277 | ||||
2278 | /* Perform any last tweaks to the TYPE_SIZE, etc. */ | |||
2279 | finalize_type_size (rli->t); | |||
2280 | ||||
2281 | /* Compute bitfield representatives. */ | |||
2282 | finish_bitfield_layout (rli->t); | |||
2283 | ||||
2284 | /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants. | |||
2285 | With C++ templates, it is too early to do this when the attribute | |||
2286 | is being parsed. */ | |||
2287 | for (variant = TYPE_NEXT_VARIANT (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2287, __FUNCTION__))->type_common.next_variant); variant; | |||
2288 | variant = TYPE_NEXT_VARIANT (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2288, __FUNCTION__))->type_common.next_variant)) | |||
2289 | { | |||
2290 | TYPE_PACKED (variant)((tree_class_check ((variant), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2290, __FUNCTION__))->base.u.bits.packed_flag) = TYPE_PACKED (rli->t)((tree_class_check ((rli->t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2290, __FUNCTION__))->base.u.bits.packed_flag); | |||
2291 | TYPE_REVERSE_STORAGE_ORDER (variant)((tree_check4 ((variant), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2291, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag) | |||
2292 | = TYPE_REVERSE_STORAGE_ORDER (rli->t)((tree_check4 ((rli->t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2292, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag); | |||
2293 | } | |||
2294 | ||||
2295 | /* Lay out any static members. This is done now because their type | |||
2296 | may use the record's type. */ | |||
2297 | while (!vec_safe_is_empty (rli->pending_statics)) | |||
2298 | layout_decl (rli->pending_statics->pop (), 0); | |||
2299 | ||||
2300 | /* Clean up. */ | |||
2301 | if (free_p) | |||
2302 | { | |||
2303 | vec_free (rli->pending_statics); | |||
2304 | free (rli); | |||
2305 | } | |||
2306 | } | |||
2307 | ||||
2308 | ||||
2309 | /* Finish processing a builtin RECORD_TYPE type TYPE. It's name is | |||
2310 | NAME, its fields are chained in reverse on FIELDS. | |||
2311 | ||||
2312 | If ALIGN_TYPE is non-null, it is given the same alignment as | |||
2313 | ALIGN_TYPE. */ | |||
2314 | ||||
2315 | void | |||
2316 | finish_builtin_struct (tree type, const char *name, tree fields, | |||
2317 | tree align_type) | |||
2318 | { | |||
2319 | tree tail, next; | |||
2320 | ||||
2321 | for (tail = NULL_TREE(tree) nullptr; fields; tail = fields, fields = next) | |||
2322 | { | |||
2323 | DECL_FIELD_CONTEXT (fields)((tree_check ((fields), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2323, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ) = type; | |||
2324 | next = DECL_CHAIN (fields)(((contains_struct_check (((contains_struct_check ((fields), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2324, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2324, __FUNCTION__))->common.chain)); | |||
2325 | DECL_CHAIN (fields)(((contains_struct_check (((contains_struct_check ((fields), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2325, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2325, __FUNCTION__))->common.chain)) = tail; | |||
2326 | } | |||
2327 | TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2327, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values) = tail; | |||
2328 | ||||
2329 | if (align_type) | |||
2330 | { | |||
2331 | SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2331, __FUNCTION__))->type_common.align = ffs_hwi ((((tree_class_check ((align_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2331, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((align_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2331, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
2332 | TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2332, __FUNCTION__))->base.u.bits.user_align) = TYPE_USER_ALIGN (align_type)((tree_class_check ((align_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2332, __FUNCTION__))->base.u.bits.user_align); | |||
2333 | SET_TYPE_WARN_IF_NOT_ALIGN (type,((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2334, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((align_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2334, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((align_type)->type_common.warn_if_not_align - 1) : 0))) | |||
2334 | TYPE_WARN_IF_NOT_ALIGN (align_type))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2334, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((align_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2334, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((align_type)->type_common.warn_if_not_align - 1) : 0))); | |||
2335 | } | |||
2336 | ||||
2337 | layout_type (type); | |||
2338 | #if 0 /* not yet, should get fixed properly later */ | |||
2339 | TYPE_NAME (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2339, __FUNCTION__))->type_common.name) = make_type_decl (get_identifier (name)(__builtin_constant_p (name) ? get_identifier_with_length ((name ), strlen (name)) : get_identifier (name)), type); | |||
2340 | #else | |||
2341 | TYPE_NAME (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2341, __FUNCTION__))->type_common.name) = build_decl (BUILTINS_LOCATION((location_t) 1), | |||
2342 | TYPE_DECL, get_identifier (name)(__builtin_constant_p (name) ? get_identifier_with_length ((name ), strlen (name)) : get_identifier (name)), type); | |||
2343 | #endif | |||
2344 | TYPE_STUB_DECL (type)(((contains_struct_check (((tree_class_check ((type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2344, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2344, __FUNCTION__))->common.chain)) = TYPE_NAME (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2344, __FUNCTION__))->type_common.name); | |||
2345 | layout_decl (TYPE_NAME (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2345, __FUNCTION__))->type_common.name), 0); | |||
2346 | } | |||
2347 | ||||
2348 | /* Calculate the mode, size, and alignment for TYPE. | |||
2349 | For an array type, calculate the element separation as well. | |||
2350 | Record TYPE on the chain of permanent or temporary types | |||
2351 | so that dbxout will find out about it. | |||
2352 | ||||
2353 | TYPE_SIZE of a type is nonzero if the type has been laid out already. | |||
2354 | layout_type does nothing on such a type. | |||
2355 | ||||
2356 | If the type is incomplete, its TYPE_SIZE remains zero. */ | |||
2357 | ||||
2358 | void | |||
2359 | layout_type (tree type) | |||
2360 | { | |||
2361 | gcc_assert (type)((void)(!(type) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2361, __FUNCTION__), 0 : 0)); | |||
| ||||
2362 | ||||
2363 | if (type == error_mark_nodeglobal_trees[TI_ERROR_MARK]) | |||
2364 | return; | |||
2365 | ||||
2366 | /* We don't want finalize_type_size to copy an alignment attribute to | |||
2367 | variants that don't have it. */ | |||
2368 | type = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2368, __FUNCTION__))->type_common.main_variant); | |||
2369 | ||||
2370 | /* Do nothing if type has been laid out before. */ | |||
2371 | if (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2371, __FUNCTION__))->type_common.size)) | |||
2372 | return; | |||
2373 | ||||
2374 | switch (TREE_CODE (type)((enum tree_code) (type)->base.code)) | |||
2375 | { | |||
2376 | case LANG_TYPE: | |||
2377 | /* This kind of type is the responsibility | |||
2378 | of the language-specific code. */ | |||
2379 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2379, __FUNCTION__)); | |||
2380 | ||||
2381 | case BOOLEAN_TYPE: | |||
2382 | case INTEGER_TYPE: | |||
2383 | case ENUMERAL_TYPE: | |||
2384 | { | |||
2385 | scalar_int_mode mode | |||
2386 | = smallest_int_mode_for_size (TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2386, __FUNCTION__))->type_common.precision)); | |||
2387 | SET_TYPE_MODE (type, mode)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2387, __FUNCTION__))->type_common.mode = (mode)); | |||
2388 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2388, __FUNCTION__))->type_common.size) = bitsize_int (GET_MODE_BITSIZE (mode))size_int_kind (GET_MODE_BITSIZE (mode), stk_bitsizetype); | |||
2389 | /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */ | |||
2390 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2390, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (mode))size_int_kind (GET_MODE_SIZE (mode), stk_sizetype); | |||
2391 | break; | |||
2392 | } | |||
2393 | ||||
2394 | case REAL_TYPE: | |||
2395 | { | |||
2396 | /* Allow the caller to choose the type mode, which is how decimal | |||
2397 | floats are distinguished from binary ones. */ | |||
2398 | if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2398, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) == VOIDmode((void) 0, E_VOIDmode)) | |||
2399 | SET_TYPE_MODE((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2400, __FUNCTION__))->type_common.mode = (float_mode_for_size (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2400, __FUNCTION__))->type_common.precision)).require () )) | |||
2400 | (type, float_mode_for_size (TYPE_PRECISION (type)).require ())((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2400, __FUNCTION__))->type_common.mode = (float_mode_for_size (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2400, __FUNCTION__))->type_common.precision)).require () )); | |||
2401 | scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2401, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode)); | |||
2402 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2402, __FUNCTION__))->type_common.size) = bitsize_int (GET_MODE_BITSIZE (mode))size_int_kind (GET_MODE_BITSIZE (mode), stk_bitsizetype); | |||
2403 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2403, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (mode))size_int_kind (GET_MODE_SIZE (mode), stk_sizetype); | |||
2404 | break; | |||
2405 | } | |||
2406 | ||||
2407 | case FIXED_POINT_TYPE: | |||
2408 | { | |||
2409 | /* TYPE_MODE (type) has been set already. */ | |||
2410 | scalar_mode mode = SCALAR_TYPE_MODE (type)(as_a <scalar_mode> ((tree_class_check ((type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2410, __FUNCTION__))->type_common.mode)); | |||
2411 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2411, __FUNCTION__))->type_common.size) = bitsize_int (GET_MODE_BITSIZE (mode))size_int_kind (GET_MODE_BITSIZE (mode), stk_bitsizetype); | |||
2412 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2412, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (mode))size_int_kind (GET_MODE_SIZE (mode), stk_sizetype); | |||
2413 | break; | |||
2414 | } | |||
2415 | ||||
2416 | case COMPLEX_TYPE: | |||
2417 | TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2417, __FUNCTION__))->base.u.bits.unsigned_flag) = TYPE_UNSIGNED (TREE_TYPE (type))((tree_class_check ((((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2417, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2417, __FUNCTION__))->base.u.bits.unsigned_flag); | |||
2418 | SET_TYPE_MODE (type,((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->type_common.mode = (((machine_mode ) mode_complex[((((enum tree_code) ((tree_class_check ((((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->typed.type)) : (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->typed.type))->type_common.mode) ]))) | |||
2419 | GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->type_common.mode = (((machine_mode ) mode_complex[((((enum tree_code) ((tree_class_check ((((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->typed.type)) : (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2419, __FUNCTION__))->typed.type))->type_common.mode) ]))); | |||
2420 | ||||
2421 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2421, __FUNCTION__))->type_common.size) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)))size_int_kind (GET_MODE_BITSIZE (((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2421, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode)), stk_bitsizetype); | |||
2422 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2422, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (TYPE_MODE (type)))size_int_kind (GET_MODE_SIZE (((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2422, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode)), stk_sizetype); | |||
2423 | break; | |||
2424 | ||||
2425 | case VECTOR_TYPE: | |||
2426 | { | |||
2427 | poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (type); | |||
2428 | tree innertype = TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2428, __FUNCTION__))->typed.type); | |||
2429 | ||||
2430 | /* Find an appropriate mode for the vector type. */ | |||
2431 | if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2431, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) == VOIDmode((void) 0, E_VOIDmode)) | |||
2432 | SET_TYPE_MODE (type,((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2434, __FUNCTION__))->type_common.mode = (mode_for_vector ((as_a <scalar_mode> ((tree_class_check ((innertype), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2433, __FUNCTION__))->type_common.mode)), nunits).else_blk ())) | |||
2433 | mode_for_vector (SCALAR_TYPE_MODE (innertype),((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2434, __FUNCTION__))->type_common.mode = (mode_for_vector ((as_a <scalar_mode> ((tree_class_check ((innertype), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2433, __FUNCTION__))->type_common.mode)), nunits).else_blk ())) | |||
2434 | nunits).else_blk ())((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2434, __FUNCTION__))->type_common.mode = (mode_for_vector ((as_a <scalar_mode> ((tree_class_check ((innertype), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2433, __FUNCTION__))->type_common.mode)), nunits).else_blk ())); | |||
2435 | ||||
2436 | TYPE_SATURATING (type)((tree_not_check4 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2436, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag) = TYPE_SATURATING (TREE_TYPE (type))((tree_not_check4 ((((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2436, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2436, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag); | |||
2437 | TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2437, __FUNCTION__))->base.u.bits.unsigned_flag) = TYPE_UNSIGNED (TREE_TYPE (type))((tree_class_check ((((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2437, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2437, __FUNCTION__))->base.u.bits.unsigned_flag); | |||
2438 | /* Several boolean vector elements may fit in a single unit. */ | |||
2439 | if (VECTOR_BOOLEAN_TYPE_P (type)(((enum tree_code) (type)->base.code) == VECTOR_TYPE && ((enum tree_code) (((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2439, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE ) | |||
2440 | && type->type_common.mode != BLKmode((void) 0, E_BLKmode)) | |||
2441 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2441, __FUNCTION__))->type_common.size_unit) | |||
2442 | = size_int (GET_MODE_SIZE (type->type_common.mode))size_int_kind (GET_MODE_SIZE (type->type_common.mode), stk_sizetype ); | |||
2443 | else | |||
2444 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2444, __FUNCTION__))->type_common.size_unit) = int_const_binop (MULT_EXPR, | |||
2445 | TYPE_SIZE_UNIT (innertype)((tree_class_check ((innertype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2445, __FUNCTION__))->type_common.size_unit), | |||
2446 | size_int (nunits)size_int_kind (nunits, stk_sizetype)); | |||
2447 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2447, __FUNCTION__))->type_common.size) = int_const_binop | |||
2448 | (MULT_EXPR, | |||
2449 | bits_from_bytes (TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2449, __FUNCTION__))->type_common.size_unit)), | |||
2450 | bitsize_int (BITS_PER_UNIT)size_int_kind ((8), stk_bitsizetype)); | |||
2451 | ||||
2452 | /* For vector types, we do not default to the mode's alignment. | |||
2453 | Instead, query a target hook, defaulting to natural alignment. | |||
2454 | This prevents ABI changes depending on whether or not native | |||
2455 | vector modes are supported. */ | |||
2456 | SET_TYPE_ALIGN (type, targetm.vector_alignment (type))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2456, __FUNCTION__))->type_common.align = ffs_hwi (targetm .vector_alignment (type))); | |||
2457 | ||||
2458 | /* However, if the underlying mode requires a bigger alignment than | |||
2459 | what the target hook provides, we cannot use the mode. For now, | |||
2460 | simply reject that case. */ | |||
2461 | gcc_assert (TYPE_ALIGN (type)((void)(!((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2461, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2461, __FUNCTION__))->type_common.align) - 1) : 0) >= get_mode_alignment (((((enum tree_code) ((tree_class_check ( (type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2462, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2462, __FUNCTION__), 0 : 0)) | |||
2462 | >= GET_MODE_ALIGNMENT (TYPE_MODE (type)))((void)(!((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2461, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2461, __FUNCTION__))->type_common.align) - 1) : 0) >= get_mode_alignment (((((enum tree_code) ((tree_class_check ( (type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2462, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2462, __FUNCTION__), 0 : 0)); | |||
2463 | break; | |||
2464 | } | |||
2465 | ||||
2466 | case VOID_TYPE: | |||
2467 | /* This is an incomplete type and so doesn't have a size. */ | |||
2468 | SET_TYPE_ALIGN (type, 1)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2468, __FUNCTION__))->type_common.align = ffs_hwi (1)); | |||
2469 | TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2469, __FUNCTION__))->base.u.bits.user_align) = 0; | |||
2470 | SET_TYPE_MODE (type, VOIDmode)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2470, __FUNCTION__))->type_common.mode = (((void) 0, E_VOIDmode ))); | |||
2471 | break; | |||
2472 | ||||
2473 | case OFFSET_TYPE: | |||
2474 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2474, __FUNCTION__))->type_common.size) = bitsize_int (POINTER_SIZE)size_int_kind ((((global_options.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))), stk_bitsizetype); | |||
2475 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2475, __FUNCTION__))->type_common.size_unit) = size_int (POINTER_SIZE_UNITS)size_int_kind ((((((global_options.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))) + (8) - 1) / (8)), stk_sizetype ); | |||
2476 | /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be | |||
2477 | integral, which may be an __intN. */ | |||
2478 | SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ())((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2478, __FUNCTION__))->type_common.mode = (int_mode_for_size ((((global_options.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))), 0).require ())); | |||
2479 | TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2479, __FUNCTION__))->type_common.precision) = POINTER_SIZE(((global_options.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))); | |||
2480 | break; | |||
2481 | ||||
2482 | case FUNCTION_TYPE: | |||
2483 | case METHOD_TYPE: | |||
2484 | /* It's hard to see what the mode and size of a function ought to | |||
2485 | be, but we do know the alignment is FUNCTION_BOUNDARY, so | |||
2486 | make it consistent with that. */ | |||
2487 | SET_TYPE_MODE (type,((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2488, __FUNCTION__))->type_common.mode = (int_mode_for_size (8, 0).else_blk ())) | |||
2488 | int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ())((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2488, __FUNCTION__))->type_common.mode = (int_mode_for_size (8, 0).else_blk ())); | |||
2489 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2489, __FUNCTION__))->type_common.size) = bitsize_int (FUNCTION_BOUNDARY)size_int_kind (8, stk_bitsizetype); | |||
2490 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2490, __FUNCTION__))->type_common.size_unit) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT)size_int_kind (8 / (8), stk_sizetype); | |||
2491 | break; | |||
2492 | ||||
2493 | case POINTER_TYPE: | |||
2494 | case REFERENCE_TYPE: | |||
2495 | { | |||
2496 | scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type)(as_a <scalar_int_mode> ((tree_class_check ((type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2496, __FUNCTION__))->type_common.mode)); | |||
2497 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2497, __FUNCTION__))->type_common.size) = bitsize_int (GET_MODE_BITSIZE (mode))size_int_kind (GET_MODE_BITSIZE (mode), stk_bitsizetype); | |||
2498 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2498, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (mode))size_int_kind (GET_MODE_SIZE (mode), stk_sizetype); | |||
2499 | TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2499, __FUNCTION__))->base.u.bits.unsigned_flag) = 1; | |||
2500 | TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2500, __FUNCTION__))->type_common.precision) = GET_MODE_PRECISION (mode); | |||
2501 | } | |||
2502 | break; | |||
2503 | ||||
2504 | case ARRAY_TYPE: | |||
2505 | { | |||
2506 | tree index = TYPE_DOMAIN (type)((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2506, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values ); | |||
2507 | tree element = TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2507, __FUNCTION__))->typed.type); | |||
2508 | ||||
2509 | /* We need to know both bounds in order to compute the size. */ | |||
2510 | if (index && TYPE_MAX_VALUE (index)((tree_check5 ((index), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2510, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ) && TYPE_MIN_VALUE (index)((tree_check5 ((index), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2510, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval ) | |||
2511 | && TYPE_SIZE (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2511, __FUNCTION__))->type_common.size)) | |||
2512 | { | |||
2513 | tree ub = TYPE_MAX_VALUE (index)((tree_check5 ((index), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2513, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ); | |||
2514 | tree lb = TYPE_MIN_VALUE (index)((tree_check5 ((index), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2514, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval ); | |||
2515 | tree element_size = TYPE_SIZE (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2515, __FUNCTION__))->type_common.size); | |||
2516 | tree length; | |||
2517 | ||||
2518 | /* Make sure that an array of zero-sized element is zero-sized | |||
2519 | regardless of its extent. */ | |||
2520 | if (integer_zerop (element_size)) | |||
2521 | length = size_zero_nodeglobal_trees[TI_SIZE_ZERO]; | |||
2522 | ||||
2523 | /* The computation should happen in the original signedness so | |||
2524 | that (possible) negative values are handled appropriately | |||
2525 | when determining overflow. */ | |||
2526 | else | |||
2527 | { | |||
2528 | /* ??? When it is obvious that the range is signed | |||
2529 | represent it using ssizetype. */ | |||
2530 | if (TREE_CODE (lb)((enum tree_code) (lb)->base.code) == INTEGER_CST | |||
2531 | && TREE_CODE (ub)((enum tree_code) (ub)->base.code) == INTEGER_CST | |||
2532 | && TYPE_UNSIGNED (TREE_TYPE (lb))((tree_class_check ((((contains_struct_check ((lb), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2532, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2532, __FUNCTION__))->base.u.bits.unsigned_flag) | |||
2533 | && tree_int_cst_lt (ub, lb)) | |||
2534 | { | |||
2535 | lb = wide_int_to_tree (ssizetypesizetype_tab[(int) stk_ssizetype], | |||
2536 | offset_int::from (wi::to_wide (lb), | |||
2537 | SIGNED)); | |||
2538 | ub = wide_int_to_tree (ssizetypesizetype_tab[(int) stk_ssizetype], | |||
2539 | offset_int::from (wi::to_wide (ub), | |||
2540 | SIGNED)); | |||
2541 | } | |||
2542 | length | |||
2543 | = fold_convert (sizetype,fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), PLUS_EXPR, build_int_cst (((contains_struct_check ((lb), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2545, __FUNCTION__))->typed.type), 1), size_binop_loc (( (location_t) 0), MINUS_EXPR, ub, lb))) | |||
2544 | size_binop (PLUS_EXPR,fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), PLUS_EXPR, build_int_cst (((contains_struct_check ((lb), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2545, __FUNCTION__))->typed.type), 1), size_binop_loc (( (location_t) 0), MINUS_EXPR, ub, lb))) | |||
2545 | build_int_cst (TREE_TYPE (lb), 1),fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), PLUS_EXPR, build_int_cst (((contains_struct_check ((lb), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2545, __FUNCTION__))->typed.type), 1), size_binop_loc (( (location_t) 0), MINUS_EXPR, ub, lb))) | |||
2546 | size_binop (MINUS_EXPR, ub, lb)))fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype ], size_binop_loc (((location_t) 0), PLUS_EXPR, build_int_cst (((contains_struct_check ((lb), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2545, __FUNCTION__))->typed.type), 1), size_binop_loc (( (location_t) 0), MINUS_EXPR, ub, lb))); | |||
2547 | } | |||
2548 | ||||
2549 | /* ??? We have no way to distinguish a null-sized array from an | |||
2550 | array spanning the whole sizetype range, so we arbitrarily | |||
2551 | decide that [0, -1] is the only valid representation. */ | |||
2552 | if (integer_zerop (length) | |||
2553 | && TREE_OVERFLOW (length)((tree_class_check ((length), (tcc_constant), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2553, __FUNCTION__))->base.public_flag) | |||
2554 | && integer_zerop (lb)) | |||
2555 | length = size_zero_nodeglobal_trees[TI_SIZE_ZERO]; | |||
2556 | ||||
2557 | TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2557, __FUNCTION__))->type_common.size) = size_binop (MULT_EXPR, element_size,size_binop_loc (((location_t) 0), MULT_EXPR, element_size, bits_from_bytes (length)) | |||
2558 | bits_from_bytes (length))size_binop_loc (((location_t) 0), MULT_EXPR, element_size, bits_from_bytes (length)); | |||
2559 | ||||
2560 | /* If we know the size of the element, calculate the total size | |||
2561 | directly, rather than do some division thing below. This | |||
2562 | optimization helps Fortran assumed-size arrays (where the | |||
2563 | size of the array is determined at runtime) substantially. */ | |||
2564 | if (TYPE_SIZE_UNIT (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2564, __FUNCTION__))->type_common.size_unit)) | |||
2565 | TYPE_SIZE_UNIT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2565, __FUNCTION__))->type_common.size_unit) | |||
2566 | = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length)size_binop_loc (((location_t) 0), MULT_EXPR, ((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2566, __FUNCTION__))->type_common.size_unit), length); | |||
2567 | } | |||
2568 | ||||
2569 | /* Now round the alignment and size, | |||
2570 | using machine-dependent criteria if any. */ | |||
2571 | ||||
2572 | unsigned align = TYPE_ALIGN (element)(((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2572, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2572, __FUNCTION__))->type_common.align) - 1) : 0); | |||
2573 | if (TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2573, __FUNCTION__))->base.u.bits.user_align)) | |||
2574 | align = MAX (align, TYPE_ALIGN (type))((align) > ((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2574, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2574, __FUNCTION__))->type_common.align) - 1) : 0)) ? (align ) : ((((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2574, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2574, __FUNCTION__))->type_common.align) - 1) : 0))); | |||
2575 | else | |||
2576 | TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2576, __FUNCTION__))->base.u.bits.user_align) = TYPE_USER_ALIGN (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2576, __FUNCTION__))->base.u.bits.user_align); | |||
2577 | if (!TYPE_WARN_IF_NOT_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2577, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((type)->type_common.warn_if_not_align - 1) : 0)) | |||
2578 | SET_TYPE_WARN_IF_NOT_ALIGN (type,((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2579, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2579, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((element)->type_common.warn_if_not_align - 1) : 0))) | |||
2579 | TYPE_WARN_IF_NOT_ALIGN (element))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2579, __FUNCTION__))->type_common.warn_if_not_align = ffs_hwi (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2579, __FUNCTION__))->type_common.warn_if_not_align ? (( unsigned)1) << ((element)->type_common.warn_if_not_align - 1) : 0))); | |||
2580 | #ifdef ROUND_TYPE_ALIGN | |||
2581 | align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT(8)); | |||
2582 | #else | |||
2583 | align = MAX (align, BITS_PER_UNIT)((align) > ((8)) ? (align) : ((8))); | |||
2584 | #endif | |||
2585 | SET_TYPE_ALIGN (type, align)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2585, __FUNCTION__))->type_common.align = ffs_hwi (align )); | |||
2586 | SET_TYPE_MODE (type, BLKmode)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2586, __FUNCTION__))->type_common.mode = (((void) 0, E_BLKmode ))); | |||
2587 | if (TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2587, __FUNCTION__))->type_common.size) != 0 | |||
2588 | && ! targetm.member_type_forces_blk (type, VOIDmode((void) 0, E_VOIDmode)) | |||
2589 | /* BLKmode elements force BLKmode aggregate; | |||
2590 | else extract/store fields may lose. */ | |||
2591 | && (TYPE_MODE (TREE_TYPE (type))((((enum tree_code) ((tree_class_check ((((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2591, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2591, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2591, __FUNCTION__))->typed.type)) : (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2591, __FUNCTION__))->typed.type))->type_common.mode) != BLKmode((void) 0, E_BLKmode) | |||
2592 | || TYPE_NO_FORCE_BLK (TREE_TYPE (type))((tree_class_check ((((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2592, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2592, __FUNCTION__))->type_common.no_force_blk_flag))) | |||
2593 | { | |||
2594 | SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2595, __FUNCTION__))->type_common.mode = (mode_for_array (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2594, __FUNCTION__))->typed.type), ((tree_class_check (( type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2595, __FUNCTION__))->type_common.size)))) | |||
2595 | TYPE_SIZE (type)))((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2595, __FUNCTION__))->type_common.mode = (mode_for_array (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2594, __FUNCTION__))->typed.type), ((tree_class_check (( type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2595, __FUNCTION__))->type_common.size)))); | |||
2596 | if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2596, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) != BLKmode((void) 0, E_BLKmode) | |||
2597 | && STRICT_ALIGNMENT0 && TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2597, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2597, __FUNCTION__))->type_common.align) - 1) : 0) < BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0 ) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128))) | |||
2598 | && TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2598, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2598, __FUNCTION__))->type_common.align) - 1) : 0) < GET_MODE_ALIGNMENT (TYPE_MODE (type))get_mode_alignment (((((enum tree_code) ((tree_class_check (( type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2598, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode))) | |||
2599 | { | |||
2600 | TYPE_NO_FORCE_BLK (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2600, __FUNCTION__))->type_common.no_force_blk_flag) = 1; | |||
2601 | SET_TYPE_MODE (type, BLKmode)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2601, __FUNCTION__))->type_common.mode = (((void) 0, E_BLKmode ))); | |||
2602 | } | |||
2603 | } | |||
2604 | if (AGGREGATE_TYPE_P (element)(((enum tree_code) (element)->base.code) == ARRAY_TYPE || ( ((enum tree_code) (element)->base.code) == RECORD_TYPE || ( (enum tree_code) (element)->base.code) == UNION_TYPE || (( enum tree_code) (element)->base.code) == QUAL_UNION_TYPE))) | |||
2605 | TYPE_TYPELESS_STORAGE (type)((tree_check4 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2605, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage) = TYPE_TYPELESS_STORAGE (element)((tree_check4 ((element), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2605, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->type_common.typeless_storage); | |||
2606 | /* When the element size is constant, check that it is at least as | |||
2607 | large as the element alignment. */ | |||
2608 | if (TYPE_SIZE_UNIT (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2608, __FUNCTION__))->type_common.size_unit) | |||
2609 | && TREE_CODE (TYPE_SIZE_UNIT (element))((enum tree_code) (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2609, __FUNCTION__))->type_common.size_unit))->base.code ) == INTEGER_CST | |||
2610 | /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than | |||
2611 | TYPE_ALIGN_UNIT. */ | |||
2612 | && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))((tree_class_check ((((tree_class_check ((element), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2612, __FUNCTION__))->type_common.size_unit)), (tcc_constant ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2612, __FUNCTION__))->base.public_flag) | |||
2613 | && !integer_zerop (TYPE_SIZE_UNIT (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2613, __FUNCTION__))->type_common.size_unit))) | |||
2614 | { | |||
2615 | if (compare_tree_int (TYPE_SIZE_UNIT (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2615, __FUNCTION__))->type_common.size_unit), | |||
2616 | TYPE_ALIGN_UNIT (element)((((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2616, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2616, __FUNCTION__))->type_common.align) - 1) : 0) / (8) )) < 0) | |||
2617 | error ("alignment of array elements is greater than " | |||
2618 | "element size"); | |||
2619 | else if (TYPE_ALIGN_UNIT (element)((((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2619, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2619, __FUNCTION__))->type_common.align) - 1) : 0) / (8) ) > 1 | |||
2620 | && (wi::zext (wi::to_wide (TYPE_SIZE_UNIT (element)((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2620, __FUNCTION__))->type_common.size_unit)), | |||
2621 | ffs_hwi (TYPE_ALIGN_UNIT (element)((((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2621, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((element), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2621, __FUNCTION__))->type_common.align) - 1) : 0) / (8) )) - 1) | |||
2622 | != 0)) | |||
2623 | error ("size of array element is not a multiple of its " | |||
2624 | "alignment"); | |||
2625 | } | |||
2626 | break; | |||
2627 | } | |||
2628 | ||||
2629 | case RECORD_TYPE: | |||
2630 | case UNION_TYPE: | |||
2631 | case QUAL_UNION_TYPE: | |||
2632 | { | |||
2633 | tree field; | |||
2634 | record_layout_info rli; | |||
2635 | ||||
2636 | /* Initialize the layout information. */ | |||
2637 | rli = start_record_layout (type); | |||
2638 | ||||
2639 | /* If this is a QUAL_UNION_TYPE, we want to process the fields | |||
2640 | in the reverse order in building the COND_EXPR that denotes | |||
2641 | its size. We reverse them again later. */ | |||
2642 | if (TREE_CODE (type)((enum tree_code) (type)->base.code) == QUAL_UNION_TYPE) | |||
2643 | TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2643, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values) = nreverse (TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2643, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values)); | |||
2644 | ||||
2645 | /* Place all the fields. */ | |||
2646 | for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2646, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2646, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2646, __FUNCTION__))->common.chain))) | |||
2647 | place_field (rli, field); | |||
2648 | ||||
2649 | if (TREE_CODE (type)((enum tree_code) (type)->base.code) == QUAL_UNION_TYPE) | |||
2650 | TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2650, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values) = nreverse (TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2650, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values)); | |||
2651 | ||||
2652 | /* Finish laying out the record. */ | |||
2653 | finish_record_layout (rli, /*free_p=*/true); | |||
2654 | } | |||
2655 | break; | |||
2656 | ||||
2657 | default: | |||
2658 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2658, __FUNCTION__)); | |||
2659 | } | |||
2660 | ||||
2661 | /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For | |||
2662 | records and unions, finish_record_layout already called this | |||
2663 | function. */ | |||
2664 | if (!RECORD_OR_UNION_TYPE_P (type)(((enum tree_code) (type)->base.code) == RECORD_TYPE || (( enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code) (type)->base.code) == QUAL_UNION_TYPE)) | |||
2665 | finalize_type_size (type); | |||
2666 | ||||
2667 | /* We should never see alias sets on incomplete aggregates. And we | |||
2668 | should not call layout_type on not incomplete aggregates. */ | |||
2669 | if (AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE))) | |||
2670 | gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type))((void)(!(!((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2670, __FUNCTION__))->type_common.alias_set != -1)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2670, __FUNCTION__), 0 : 0)); | |||
2671 | } | |||
2672 | ||||
2673 | /* Return the least alignment required for type TYPE. */ | |||
2674 | ||||
2675 | unsigned int | |||
2676 | min_align_of_type (tree type) | |||
2677 | { | |||
2678 | unsigned int align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2678, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2678, __FUNCTION__))->type_common.align) - 1) : 0); | |||
2679 | if (!TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2679, __FUNCTION__))->base.u.bits.user_align)) | |||
2680 | { | |||
2681 | align = MIN (align, BIGGEST_ALIGNMENT)((align) < ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & ( 1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) ? (align) : (( ((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128))))); | |||
2682 | #ifdef BIGGEST_FIELD_ALIGNMENT | |||
2683 | align = MIN (align, BIGGEST_FIELD_ALIGNMENT)((align) < (BIGGEST_FIELD_ALIGNMENT) ? (align) : (BIGGEST_FIELD_ALIGNMENT )); | |||
2684 | #endif | |||
2685 | unsigned int field_align = align; | |||
2686 | #ifdef ADJUST_FIELD_ALIGN | |||
2687 | field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align)x86_field_alignment ((type), (field_align)); | |||
2688 | #endif | |||
2689 | align = MIN (align, field_align)((align) < (field_align) ? (align) : (field_align)); | |||
2690 | } | |||
2691 | return align / BITS_PER_UNIT(8); | |||
2692 | } | |||
2693 | ||||
2694 | /* Create and return a type for signed integers of PRECISION bits. */ | |||
2695 | ||||
2696 | tree | |||
2697 | make_signed_type (int precision) | |||
2698 | { | |||
2699 | tree type = make_node (INTEGER_TYPE); | |||
2700 | ||||
2701 | TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2701, __FUNCTION__))->type_common.precision) = precision; | |||
2702 | ||||
2703 | fixup_signed_type (type); | |||
2704 | return type; | |||
2705 | } | |||
2706 | ||||
2707 | /* Create and return a type for unsigned integers of PRECISION bits. */ | |||
2708 | ||||
2709 | tree | |||
2710 | make_unsigned_type (int precision) | |||
2711 | { | |||
2712 | tree type = make_node (INTEGER_TYPE); | |||
2713 | ||||
2714 | TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2714, __FUNCTION__))->type_common.precision) = precision; | |||
2715 | ||||
2716 | fixup_unsigned_type (type); | |||
2717 | return type; | |||
2718 | } | |||
2719 | ||||
2720 | /* Create and return a type for fract of PRECISION bits, UNSIGNEDP, | |||
2721 | and SATP. */ | |||
2722 | ||||
2723 | tree | |||
2724 | make_fract_type (int precision, int unsignedp, int satp) | |||
2725 | { | |||
2726 | tree type = make_node (FIXED_POINT_TYPE); | |||
2727 | ||||
2728 | TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2728, __FUNCTION__))->type_common.precision) = precision; | |||
2729 | ||||
2730 | if (satp) | |||
2731 | TYPE_SATURATING (type)((tree_not_check4 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2731, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag) = 1; | |||
2732 | ||||
2733 | /* Lay out the type: set its alignment, size, etc. */ | |||
2734 | TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2734, __FUNCTION__))->base.u.bits.unsigned_flag) = unsignedp; | |||
2735 | enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT; | |||
2736 | SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ())((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2736, __FUNCTION__))->type_common.mode = (mode_for_size ( precision, mclass, 0).require ())); | |||
2737 | layout_type (type); | |||
2738 | ||||
2739 | return type; | |||
2740 | } | |||
2741 | ||||
2742 | /* Create and return a type for accum of PRECISION bits, UNSIGNEDP, | |||
2743 | and SATP. */ | |||
2744 | ||||
2745 | tree | |||
2746 | make_accum_type (int precision, int unsignedp, int satp) | |||
2747 | { | |||
2748 | tree type = make_node (FIXED_POINT_TYPE); | |||
2749 | ||||
2750 | TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2750, __FUNCTION__))->type_common.precision) = precision; | |||
2751 | ||||
2752 | if (satp) | |||
2753 | TYPE_SATURATING (type)((tree_not_check4 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2753, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag) = 1; | |||
2754 | ||||
2755 | /* Lay out the type: set its alignment, size, etc. */ | |||
2756 | TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2756, __FUNCTION__))->base.u.bits.unsigned_flag) = unsignedp; | |||
2757 | enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM; | |||
2758 | SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ())((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2758, __FUNCTION__))->type_common.mode = (mode_for_size ( precision, mclass, 0).require ())); | |||
2759 | layout_type (type); | |||
2760 | ||||
2761 | return type; | |||
2762 | } | |||
2763 | ||||
2764 | /* Initialize sizetypes so layout_type can use them. */ | |||
2765 | ||||
2766 | void | |||
2767 | initialize_sizetypes (void) | |||
2768 | { | |||
2769 | int precision, bprecision; | |||
2770 | ||||
2771 | /* Get sizetypes precision from the SIZE_TYPE target macro. */ | |||
2772 | if (strcmp (SIZETYPE(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? "long unsigned int" : "unsigned int"), "unsigned int") == 0) | |||
2773 | precision = INT_TYPE_SIZE32; | |||
2774 | else if (strcmp (SIZETYPE(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? "long unsigned int" : "unsigned int"), "long unsigned int") == 0) | |||
2775 | precision = LONG_TYPE_SIZE(((global_options.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))); | |||
2776 | else if (strcmp (SIZETYPE(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? "long unsigned int" : "unsigned int"), "long long unsigned int") == 0) | |||
2777 | precision = LONG_LONG_TYPE_SIZE64; | |||
2778 | else if (strcmp (SIZETYPE(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? "long unsigned int" : "unsigned int"), "short unsigned int") == 0) | |||
2779 | precision = SHORT_TYPE_SIZE16; | |||
2780 | else | |||
2781 | { | |||
2782 | int i; | |||
2783 | ||||
2784 | precision = -1; | |||
2785 | for (i = 0; i < NUM_INT_N_ENTS1; i++) | |||
2786 | if (int_n_enabled_p[i]) | |||
2787 | { | |||
2788 | char name[50], altname[50]; | |||
2789 | sprintf (name, "__int%d unsigned", int_n_data[i].bitsize); | |||
2790 | sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize); | |||
2791 | ||||
2792 | if (strcmp (name, SIZETYPE(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? "long unsigned int" : "unsigned int")) == 0 | |||
2793 | || strcmp (altname, SIZETYPE(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? "long unsigned int" : "unsigned int")) == 0) | |||
2794 | { | |||
2795 | precision = int_n_data[i].bitsize; | |||
2796 | } | |||
2797 | } | |||
2798 | if (precision == -1) | |||
2799 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2799, __FUNCTION__)); | |||
2800 | } | |||
2801 | ||||
2802 | bprecision | |||
2803 | = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE)((precision + 3 + 1) < (GET_MODE_BITSIZE (((global_options .x_ix86_isa_flags & (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode::from_int) E_TImode)) : (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)))) ? (precision + 3 + 1 ) : (GET_MODE_BITSIZE (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode ::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode:: from_int) E_DImode))))); | |||
2804 | bprecision = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision)); | |||
2805 | if (bprecision > HOST_BITS_PER_DOUBLE_INT(2 * 64)) | |||
2806 | bprecision = HOST_BITS_PER_DOUBLE_INT(2 * 64); | |||
2807 | ||||
2808 | /* Create stubs for sizetype and bitsizetype so we can create constants. */ | |||
2809 | sizetypesizetype_tab[(int) stk_sizetype] = make_node (INTEGER_TYPE); | |||
2810 | TYPE_NAME (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2810, __FUNCTION__))->type_common.name) = get_identifier ("sizetype")(__builtin_constant_p ("sizetype") ? get_identifier_with_length (("sizetype"), strlen ("sizetype")) : get_identifier ("sizetype" )); | |||
2811 | TYPE_PRECISION (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2811, __FUNCTION__))->type_common.precision) = precision; | |||
2812 | TYPE_UNSIGNED (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2812, __FUNCTION__))->base.u.bits.unsigned_flag) = 1; | |||
2813 | bitsizetypesizetype_tab[(int) stk_bitsizetype] = make_node (INTEGER_TYPE); | |||
2814 | TYPE_NAME (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2814, __FUNCTION__))->type_common.name) = get_identifier ("bitsizetype")(__builtin_constant_p ("bitsizetype") ? get_identifier_with_length (("bitsizetype"), strlen ("bitsizetype")) : get_identifier ( "bitsizetype")); | |||
2815 | TYPE_PRECISION (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2815, __FUNCTION__))->type_common.precision) = bprecision; | |||
2816 | TYPE_UNSIGNED (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2816, __FUNCTION__))->base.u.bits.unsigned_flag) = 1; | |||
2817 | ||||
2818 | /* Now layout both types manually. */ | |||
2819 | scalar_int_mode mode = smallest_int_mode_for_size (precision); | |||
2820 | SET_TYPE_MODE (sizetype, mode)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2820, __FUNCTION__))->type_common.mode = (mode)); | |||
2821 | SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)))((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2821, __FUNCTION__))->type_common.align = ffs_hwi (get_mode_alignment (((((enum tree_code) ((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2821, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (sizetype_tab[(int) stk_sizetype]) : (sizetype_tab[(int) stk_sizetype ])->type_common.mode)))); | |||
2822 | TYPE_SIZE (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2822, __FUNCTION__))->type_common.size) = bitsize_int (precision)size_int_kind (precision, stk_bitsizetype); | |||
2823 | TYPE_SIZE_UNIT (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2823, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (mode))size_int_kind (GET_MODE_SIZE (mode), stk_sizetype); | |||
2824 | set_min_and_max_values_for_integral_type (sizetypesizetype_tab[(int) stk_sizetype], precision, UNSIGNED); | |||
2825 | ||||
2826 | mode = smallest_int_mode_for_size (bprecision); | |||
2827 | SET_TYPE_MODE (bitsizetype, mode)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2827, __FUNCTION__))->type_common.mode = (mode)); | |||
2828 | SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)))((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2828, __FUNCTION__))->type_common.align = ffs_hwi (get_mode_alignment (((((enum tree_code) ((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2828, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (sizetype_tab[(int) stk_bitsizetype]) : (sizetype_tab[(int) stk_bitsizetype ])->type_common.mode)))); | |||
2829 | TYPE_SIZE (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2829, __FUNCTION__))->type_common.size) = bitsize_int (bprecision)size_int_kind (bprecision, stk_bitsizetype); | |||
2830 | TYPE_SIZE_UNIT (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2830, __FUNCTION__))->type_common.size_unit) = size_int (GET_MODE_SIZE (mode))size_int_kind (GET_MODE_SIZE (mode), stk_sizetype); | |||
2831 | set_min_and_max_values_for_integral_type (bitsizetypesizetype_tab[(int) stk_bitsizetype], bprecision, UNSIGNED); | |||
2832 | ||||
2833 | /* Create the signed variants of *sizetype. */ | |||
2834 | ssizetypesizetype_tab[(int) stk_ssizetype] = make_signed_type (TYPE_PRECISION (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2834, __FUNCTION__))->type_common.precision)); | |||
2835 | TYPE_NAME (ssizetype)((tree_class_check ((sizetype_tab[(int) stk_ssizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2835, __FUNCTION__))->type_common.name) = get_identifier ("ssizetype")(__builtin_constant_p ("ssizetype") ? get_identifier_with_length (("ssizetype"), strlen ("ssizetype")) : get_identifier ("ssizetype" )); | |||
2836 | sbitsizetypesizetype_tab[(int) stk_sbitsizetype] = make_signed_type (TYPE_PRECISION (bitsizetype)((tree_class_check ((sizetype_tab[(int) stk_bitsizetype]), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2836, __FUNCTION__))->type_common.precision)); | |||
2837 | TYPE_NAME (sbitsizetype)((tree_class_check ((sizetype_tab[(int) stk_sbitsizetype]), ( tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2837, __FUNCTION__))->type_common.name) = get_identifier ("sbitsizetype")(__builtin_constant_p ("sbitsizetype") ? get_identifier_with_length (("sbitsizetype"), strlen ("sbitsizetype")) : get_identifier ("sbitsizetype")); | |||
2838 | } | |||
2839 | ||||
2840 | /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE | |||
2841 | or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE | |||
2842 | for TYPE, based on the PRECISION and whether or not the TYPE | |||
2843 | IS_UNSIGNED. PRECISION need not correspond to a width supported | |||
2844 | natively by the hardware; for example, on a machine with 8-bit, | |||
2845 | 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or | |||
2846 | 61. */ | |||
2847 | ||||
2848 | void | |||
2849 | set_min_and_max_values_for_integral_type (tree type, | |||
2850 | int precision, | |||
2851 | signop sgn) | |||
2852 | { | |||
2853 | /* For bitfields with zero width we end up creating integer types | |||
2854 | with zero precision. Don't assign any minimum/maximum values | |||
2855 | to those types, they don't have any valid value. */ | |||
2856 | if (precision < 1) | |||
2857 | return; | |||
2858 | ||||
2859 | gcc_assert (precision <= WIDE_INT_MAX_PRECISION)((void)(!(precision <= ((((64*(8)) + 64) / 64) * 64)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2859, __FUNCTION__), 0 : 0)); | |||
2860 | ||||
2861 | TYPE_MIN_VALUE (type)((tree_check5 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2861, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval ) | |||
2862 | = wide_int_to_tree (type, wi::min_value (precision, sgn)); | |||
2863 | TYPE_MAX_VALUE (type)((tree_check5 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2863, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ) | |||
2864 | = wide_int_to_tree (type, wi::max_value (precision, sgn)); | |||
2865 | } | |||
2866 | ||||
2867 | /* Set the extreme values of TYPE based on its precision in bits, | |||
2868 | then lay it out. Used when make_signed_type won't do | |||
2869 | because the tree code is not INTEGER_TYPE. */ | |||
2870 | ||||
2871 | void | |||
2872 | fixup_signed_type (tree type) | |||
2873 | { | |||
2874 | int precision = TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2874, __FUNCTION__))->type_common.precision); | |||
2875 | ||||
2876 | set_min_and_max_values_for_integral_type (type, precision, SIGNED); | |||
2877 | ||||
2878 | /* Lay out the type: set its alignment, size, etc. */ | |||
2879 | layout_type (type); | |||
2880 | } | |||
2881 | ||||
2882 | /* Set the extreme values of TYPE based on its precision in bits, | |||
2883 | then lay it out. This is used both in `make_unsigned_type' | |||
2884 | and for enumeral types. */ | |||
2885 | ||||
2886 | void | |||
2887 | fixup_unsigned_type (tree type) | |||
2888 | { | |||
2889 | int precision = TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2889, __FUNCTION__))->type_common.precision); | |||
2890 | ||||
2891 | TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 2891, __FUNCTION__))->base.u.bits.unsigned_flag) = 1; | |||
2892 | ||||
2893 | set_min_and_max_values_for_integral_type (type, precision, UNSIGNED); | |||
2894 | ||||
2895 | /* Lay out the type: set its alignment, size, etc. */ | |||
2896 | layout_type (type); | |||
2897 | } | |||
2898 | ||||
2899 | /* Construct an iterator for a bitfield that spans BITSIZE bits, | |||
2900 | starting at BITPOS. | |||
2901 | ||||
2902 | BITREGION_START is the bit position of the first bit in this | |||
2903 | sequence of bit fields. BITREGION_END is the last bit in this | |||
2904 | sequence. If these two fields are non-zero, we should restrict the | |||
2905 | memory access to that range. Otherwise, we are allowed to touch | |||
2906 | any adjacent non bit-fields. | |||
2907 | ||||
2908 | ALIGN is the alignment of the underlying object in bits. | |||
2909 | VOLATILEP says whether the bitfield is volatile. */ | |||
2910 | ||||
2911 | bit_field_mode_iterator | |||
2912 | ::bit_field_mode_iterator (HOST_WIDE_INTlong bitsize, HOST_WIDE_INTlong bitpos, | |||
2913 | poly_int64 bitregion_start, | |||
2914 | poly_int64 bitregion_end, | |||
2915 | unsigned int align, bool volatilep) | |||
2916 | : m_mode (NARROWEST_INT_MODE(scalar_int_mode (scalar_int_mode::from_int (class_narrowest_mode [MODE_INT])))), m_bitsize (bitsize), | |||
2917 | m_bitpos (bitpos), m_bitregion_start (bitregion_start), | |||
2918 | m_bitregion_end (bitregion_end), m_align (align), | |||
2919 | m_volatilep (volatilep), m_count (0) | |||
2920 | { | |||
2921 | if (known_eq (m_bitregion_end, 0)(!maybe_ne (m_bitregion_end, 0))) | |||
2922 | { | |||
2923 | /* We can assume that any aligned chunk of ALIGN bits that overlaps | |||
2924 | the bitfield is mapped and won't trap, provided that ALIGN isn't | |||
2925 | too large. The cap is the biggest required alignment for data, | |||
2926 | or at least the word size. And force one such chunk at least. */ | |||
2927 | unsigned HOST_WIDE_INTlong units | |||
2928 | = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD))((align) < ((((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & ( 1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) > (((8) * ( ((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) ? 8 : 4))) ? ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & ( 1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) : (((8) * (((global_options .x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))))) ? (align) : ((((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & ( 1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) > (((8) * ( ((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) ? 8 : 4))) ? ((((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & ( 1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128)))) : (((8) * (((global_options .x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)))))); | |||
2929 | if (bitsize <= 0) | |||
2930 | bitsize = 1; | |||
2931 | HOST_WIDE_INTlong end = bitpos + bitsize + units - 1; | |||
2932 | m_bitregion_end = end - end % units - 1; | |||
2933 | } | |||
2934 | } | |||
2935 | ||||
2936 | /* Calls to this function return successively larger modes that can be used | |||
2937 | to represent the bitfield. Return true if another bitfield mode is | |||
2938 | available, storing it in *OUT_MODE if so. */ | |||
2939 | ||||
bool
bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
{
  scalar_int_mode mode;
  /* Resume the walk at the mode stored by the previous call (or the
     narrowest integer mode on the first call).  */
  for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
    {
      unsigned int unit = GET_MODE_BITSIZE (mode);

      /* Skip modes that don't have full precision.  */
      if (unit != GET_MODE_PRECISION (mode))
	continue;

      /* Stop if the mode is too wide to handle efficiently.  */
      if (unit > MAX_FIXED_MODE_SIZE)
	break;

      /* Don't deliver more than one multiword mode; the smallest one
	 should be used.  */
      if (m_count > 0 && unit > BITS_PER_WORD)
	break;

      /* Skip modes that are too small.  SUBSTART/SUBEND give the bit
	 range the field would occupy within a UNIT-aligned, UNIT-sized
	 chunk; the whole field must fit in a single chunk.  */
      unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
      unsigned HOST_WIDE_INT subend = substart + m_bitsize;
      if (subend > unit)
	continue;

      /* Stop if the mode goes outside the bitregion.  Since the walk
	 only widens, no later mode can fit either.  */
      HOST_WIDE_INT start = m_bitpos - substart;
      if (maybe_ne (m_bitregion_start, 0)
	  && maybe_lt (start, m_bitregion_start))
	break;
      HOST_WIDE_INT end = start + unit;
      if (maybe_gt (end, m_bitregion_end + 1))
	break;

      /* Stop if the mode requires too much alignment.  */
      if (GET_MODE_ALIGNMENT (mode) > m_align
	  && targetm.slow_unaligned_access (mode, m_align))
	break;

      /* MODE is acceptable: report it and remember where the next call
	 should resume.  */
      *out_mode = mode;
      m_mode = GET_MODE_WIDER_MODE (mode);
      m_count++;
      return true;
    }
  return false;
}
2988 | ||||
2989 | /* Return true if smaller modes are generally preferred for this kind | |||
2990 | of bitfield. */ | |||
2991 | ||||
2992 | bool | |||
2993 | bit_field_mode_iterator::prefer_smaller_modes () | |||
2994 | { | |||
2995 | return (m_volatilep | |||
2996 | ? targetm.narrow_volatile_bitfield () | |||
2997 | : !SLOW_BYTE_ACCESS0); | |||
2998 | } | |||
2999 | ||||
3000 | /* Find the best machine mode to use when referencing a bit field of length | |||
3001 | BITSIZE bits starting at BITPOS. | |||
3002 | ||||
3003 | BITREGION_START is the bit position of the first bit in this | |||
3004 | sequence of bit fields. BITREGION_END is the last bit in this | |||
3005 | sequence. If these two fields are non-zero, we should restrict the | |||
3006 | memory access to that range. Otherwise, we are allowed to touch | |||
3007 | any adjacent non bit-fields. | |||
3008 | ||||
3009 | The chosen mode must have no more than LARGEST_MODE_BITSIZE bits. | |||
3010 | INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller | |||
3011 | doesn't want to apply a specific limit. | |||
3012 | ||||
3013 | If no mode meets all these conditions, we return VOIDmode. | |||
3014 | ||||
3015 | The underlying object is known to be aligned to a boundary of ALIGN bits. | |||
3016 | ||||
3017 | If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the | |||
3018 | smallest mode meeting these conditions. | |||
3019 | ||||
3020 | If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the | |||
3021 | largest mode (but a mode no wider than UNITS_PER_WORD) that meets | |||
3022 | all the conditions. | |||
3023 | ||||
3024 | If VOLATILEP is true the narrow_volatile_bitfields target hook is used to | |||
3025 | decide which of the above modes should be used. */ | |||
3026 | ||||
bool
get_best_mode (int bitsize, int bitpos,
	       poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	       unsigned int align,
	       unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
	       scalar_int_mode *best_mode)
{
  /* The iterator yields successively wider candidate modes; each
     acceptable one overwrites *BEST_MODE, so unless we stop early the
     widest acceptable mode wins.  */
  bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
				bitregion_end, align, volatilep);
  scalar_int_mode mode;
  bool found = false;
  while (iter.next_mode (&mode)
	 /* ??? For historical reasons, reject modes that would normally
	    receive greater alignment, even if unaligned accesses are
	    acceptable.  This has both advantages and disadvantages.
	    Removing this check means that something like:

	       struct s { unsigned int x; unsigned int y; };
	       int f (struct s *s) { return s->x == 0 && s->y == 0; }

	    can be implemented using a single load and compare on
	    64-bit machines that have no alignment restrictions.
	    For example, on powerpc64-linux-gnu, we would generate:

		    ld 3,0(3)
		    cntlzd 3,3
		    srdi 3,3,6
		    blr

	    rather than:

		    lwz 9,0(3)
		    cmpwi 7,9,0
		    bne 7,.L3
		    lwz 3,4(3)
		    cntlzw 3,3
		    srwi 3,3,5
		    extsw 3,3
		    blr
		    .p2align 4,,15
	    .L3:
		    li 3,0
		    blr

	    However, accessing more than one field can make life harder
	    for the gimple optimizers.  For example, gcc.dg/vect/bb-slp-5.c
	    has a series of unsigned short copies followed by a series of
	    unsigned short comparisons.  With this check, both the copies
	    and comparisons remain 16-bit accesses and FRE is able
	    to eliminate the latter.  Without the check, the comparisons
	    can be done using 2 64-bit operations, which FRE isn't able
	    to handle in the same way.

	    Either way, it would probably be worth disabling this check
	    during expand.  One particular example where removing the
	    check would help is the get_best_mode call in store_bit_field.
	    If we are given a memory bitregion of 128 bits that is aligned
	    to a 64-bit boundary, and the bitfield we want to modify is
	    in the second half of the bitregion, this check causes
	    store_bitfield to turn the memory into a 64-bit reference
	    to the _first_ half of the region.  We later use
	    adjust_bitfield_address to get a reference to the correct half,
	    but doing so looks to adjust_bitfield_address as though we are
	    moving past the end of the original object, so it drops the
	    associated MEM_EXPR and MEM_OFFSET.  Removing the check
	    causes store_bit_field to keep a 128-bit memory reference,
	    so that the final bitfield reference still has a MEM_EXPR
	    and MEM_OFFSET.  */
	 && GET_MODE_ALIGNMENT (mode) <= align
	 && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
    {
      *best_mode = mode;
      found = true;
      /* When small modes are preferred, the first acceptable mode is
	 also the best one, so stop immediately.  */
      if (iter.prefer_smaller_modes ())
	break;
    }

  return found;
}
3106 | ||||
3107 | /* Gets minimal and maximal values for MODE (signed or unsigned depending on | |||
3108 | SIGN). The returned constants are made to be usable in TARGET_MODE. */ | |||
3109 | ||||
3110 | void | |||
3111 | get_mode_bounds (scalar_int_mode mode, int sign, | |||
3112 | scalar_int_mode target_mode, | |||
3113 | rtx *mmin, rtx *mmax) | |||
3114 | { | |||
3115 | unsigned size = GET_MODE_PRECISION (mode); | |||
3116 | unsigned HOST_WIDE_INTlong min_val, max_val; | |||
3117 | ||||
3118 | gcc_assert (size <= HOST_BITS_PER_WIDE_INT)((void)(!(size <= 64) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stor-layout.cc" , 3118, __FUNCTION__), 0 : 0)); | |||
3119 | ||||
3120 | /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */ | |||
3121 | if (mode == BImode(scalar_int_mode ((scalar_int_mode::from_int) E_BImode))) | |||
3122 | { | |||
3123 | if (STORE_FLAG_VALUE1 < 0) | |||
3124 | { | |||
3125 | min_val = STORE_FLAG_VALUE1; | |||
3126 | max_val = 0; | |||
3127 | } | |||
3128 | else | |||
3129 | { | |||
3130 | min_val = 0; | |||
3131 | max_val = STORE_FLAG_VALUE1; | |||
3132 | } | |||
3133 | } | |||
3134 | else if (sign) | |||
3135 | { | |||
3136 | min_val = -(HOST_WIDE_INT_1U1UL << (size - 1)); | |||
3137 | max_val = (HOST_WIDE_INT_1U1UL << (size - 1)) - 1; | |||
3138 | } | |||
3139 | else | |||
3140 | { | |||
3141 | min_val = 0; | |||
3142 | max_val = (HOST_WIDE_INT_1U1UL << (size - 1) << 1) - 1; | |||
3143 | } | |||
3144 | ||||
3145 | *mmin = gen_int_mode (min_val, target_mode); | |||
3146 | *mmax = gen_int_mode (max_val, target_mode); | |||
3147 | } | |||
3148 | ||||
3149 | #include "gt-stor-layout.h" |
1 | /* Vector API for GNU compiler. | ||||
2 | Copyright (C) 2004-2023 Free Software Foundation, Inc. | ||||
3 | Contributed by Nathan Sidwell <nathan@codesourcery.com> | ||||
4 | Re-implemented in C++ by Diego Novillo <dnovillo@google.com> | ||||
5 | |||||
6 | This file is part of GCC. | ||||
7 | |||||
8 | GCC is free software; you can redistribute it and/or modify it under | ||||
9 | the terms of the GNU General Public License as published by the Free | ||||
10 | Software Foundation; either version 3, or (at your option) any later | ||||
11 | version. | ||||
12 | |||||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | ||||
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | ||||
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | ||||
16 | for more details. | ||||
17 | |||||
18 | You should have received a copy of the GNU General Public License | ||||
19 | along with GCC; see the file COPYING3. If not see | ||||
20 | <http://www.gnu.org/licenses/>. */ | ||||
21 | |||||
22 | #ifndef GCC_VEC_H | ||||
23 | #define GCC_VEC_H | ||||
24 | |||||
25 | /* Some gen* file have no ggc support as the header file gtype-desc.h is | ||||
26 | missing. Provide these definitions in case ggc.h has not been included. | ||||
27 | This is not a problem because any code that runs before gengtype is built | ||||
28 | will never need to use GC vectors.*/ | ||||
29 | |||||
30 | extern void ggc_free (void *); | ||||
31 | extern size_t ggc_round_alloc_size (size_t requested_size); | ||||
32 | extern void *ggc_realloc (void *, size_t MEM_STAT_DECL); | ||||
33 | |||||
34 | /* Templated vector type and associated interfaces. | ||||
35 | |||||
36 | The interface functions are typesafe and use inline functions, | ||||
37 | sometimes backed by out-of-line generic functions. The vectors are | ||||
38 | designed to interoperate with the GTY machinery. | ||||
39 | |||||
40 | There are both 'index' and 'iterate' accessors. The index accessor | ||||
41 | is implemented by operator[]. The iterator returns a boolean | ||||
42 | iteration condition and updates the iteration variable passed by | ||||
43 | reference. Because the iterator will be inlined, the address-of | ||||
44 | can be optimized away. | ||||
45 | |||||
46 | Each operation that increases the number of active elements is | ||||
47 | available in 'quick' and 'safe' variants. The former presumes that | ||||
48 | there is sufficient allocated space for the operation to succeed | ||||
49 | (it dies if there is not). The latter will reallocate the | ||||
50 | vector, if needed. Reallocation causes an exponential increase in | ||||
51 | vector size. If you know you will be adding N elements, it would | ||||
52 | be more efficient to use the reserve operation before adding the | ||||
53 | elements with the 'quick' operation. This will ensure there are at | ||||
54 | least as many elements as you ask for, it will exponentially | ||||
   increase if there are too few spare slots.  If you want to reserve a
56 | specific number of slots, but do not want the exponential increase | ||||
57 | (for instance, you know this is the last allocation), use the | ||||
58 | reserve_exact operation. You can also create a vector of a | ||||
59 | specific size from the get go. | ||||
60 | |||||
61 | You should prefer the push and pop operations, as they append and | ||||
62 | remove from the end of the vector. If you need to remove several | ||||
63 | items in one go, use the truncate operation. The insert and remove | ||||
64 | operations allow you to change elements in the middle of the | ||||
65 | vector. There are two remove operations, one which preserves the | ||||
66 | element ordering 'ordered_remove', and one which does not | ||||
67 | 'unordered_remove'. The latter function copies the end element | ||||
68 | into the removed slot, rather than invoke a memmove operation. The | ||||
69 | 'lower_bound' function will determine where to place an item in the | ||||
70 | array using insert that will maintain sorted order. | ||||
71 | |||||
72 | Vectors are template types with three arguments: the type of the | ||||
73 | elements in the vector, the allocation strategy, and the physical | ||||
74 | layout to use | ||||
75 | |||||
76 | Four allocation strategies are supported: | ||||
77 | |||||
78 | - Heap: allocation is done using malloc/free. This is the | ||||
79 | default allocation strategy. | ||||
80 | |||||
81 | - GC: allocation is done using ggc_alloc/ggc_free. | ||||
82 | |||||
83 | - GC atomic: same as GC with the exception that the elements | ||||
84 | themselves are assumed to be of an atomic type that does | ||||
85 | not need to be garbage collected. This means that marking | ||||
86 | routines do not need to traverse the array marking the | ||||
87 | individual elements. This increases the performance of | ||||
88 | GC activities. | ||||
89 | |||||
90 | Two physical layouts are supported: | ||||
91 | |||||
92 | - Embedded: The vector is structured using the trailing array | ||||
93 | idiom. The last member of the structure is an array of size | ||||
94 | 1. When the vector is initially allocated, a single memory | ||||
95 | block is created to hold the vector's control data and the | ||||
96 | array of elements. These vectors cannot grow without | ||||
97 | reallocation (see discussion on embeddable vectors below). | ||||
98 | |||||
99 | - Space efficient: The vector is structured as a pointer to an | ||||
100 | embedded vector. This is the default layout. It means that | ||||
101 | vectors occupy a single word of storage before initial | ||||
102 | allocation. Vectors are allowed to grow (the internal | ||||
103 | pointer is reallocated but the main vector instance does not | ||||
104 | need to relocate). | ||||
105 | |||||
106 | The type, allocation and layout are specified when the vector is | ||||
107 | declared. | ||||
108 | |||||
109 | If you need to directly manipulate a vector, then the 'address' | ||||
110 | accessor will return the address of the start of the vector. Also | ||||
111 | the 'space' predicate will tell you whether there is spare capacity | ||||
112 | in the vector. You will not normally need to use these two functions. | ||||
113 | |||||
114 | Notes on the different layout strategies | ||||
115 | |||||
116 | * Embeddable vectors (vec<T, A, vl_embed>) | ||||
117 | |||||
118 | These vectors are suitable to be embedded in other data | ||||
119 | structures so that they can be pre-allocated in a contiguous | ||||
120 | memory block. | ||||
121 | |||||
122 | Embeddable vectors are implemented using the trailing array | ||||
123 | idiom, thus they are not resizeable without changing the address | ||||
124 | of the vector object itself. This means you cannot have | ||||
125 | variables or fields of embeddable vector type -- always use a | ||||
126 | pointer to a vector. The one exception is the final field of a | ||||
127 | structure, which could be a vector type. | ||||
128 | |||||
129 | You will have to use the embedded_size & embedded_init calls to | ||||
130 | create such objects, and they will not be resizeable (so the | ||||
131 | 'safe' allocation variants are not available). | ||||
132 | |||||
133 | Properties of embeddable vectors: | ||||
134 | |||||
135 | - The whole vector and control data are allocated in a single | ||||
136 | contiguous block. It uses the trailing-vector idiom, so | ||||
137 | allocation must reserve enough space for all the elements | ||||
138 | in the vector plus its control data. | ||||
139 | - The vector cannot be re-allocated. | ||||
140 | - The vector cannot grow nor shrink. | ||||
141 | - No indirections needed for access/manipulation. | ||||
142 | - It requires 2 words of storage (prior to vector allocation). | ||||
143 | |||||
144 | |||||
145 | * Space efficient vector (vec<T, A, vl_ptr>) | ||||
146 | |||||
147 | These vectors can grow dynamically and are allocated together | ||||
148 | with their control data. They are suited to be included in data | ||||
149 | structures. Prior to initial allocation, they only take a single | ||||
150 | word of storage. | ||||
151 | |||||
152 | These vectors are implemented as a pointer to embeddable vectors. | ||||
153 | The semantics allow for this pointer to be NULL to represent | ||||
154 | empty vectors. This way, empty vectors occupy minimal space in | ||||
155 | the structure containing them. | ||||
156 | |||||
157 | Properties: | ||||
158 | |||||
159 | - The whole vector and control data are allocated in a single | ||||
160 | contiguous block. | ||||
161 | - The whole vector may be re-allocated. | ||||
162 | - Vector data may grow and shrink. | ||||
163 | - Access and manipulation requires a pointer test and | ||||
164 | indirection. | ||||
165 | - It requires 1 word of storage (prior to vector allocation). | ||||
166 | |||||
167 | An example of their use would be, | ||||
168 | |||||
169 | struct my_struct { | ||||
170 | // A space-efficient vector of tree pointers in GC memory. | ||||
171 | vec<tree, va_gc, vl_ptr> v; | ||||
172 | }; | ||||
173 | |||||
174 | struct my_struct *s; | ||||
175 | |||||
176 | if (s->v.length ()) { we have some contents } | ||||
177 | s->v.safe_push (decl); // append some decl onto the end | ||||
178 | for (ix = 0; s->v.iterate (ix, &elt); ix++) | ||||
179 | { do something with elt } | ||||
180 | */ | ||||
181 | |||||
182 | /* Support function for statistics. */ | ||||
183 | extern void dump_vec_loc_statistics (void); | ||||
184 | |||||
185 | /* Hashtable mapping vec addresses to descriptors. */ | ||||
186 | extern htab_t vec_mem_usage_hash; | ||||
187 | |||||
188 | /* Control data for vectors. This contains the number of allocated | ||||
189 | and used slots inside a vector. */ | ||||
190 | |||||
struct vec_prefix
{
  /* FIXME - These fields should be private, but we need to cater to
     compilers that have stricter notions of PODness for types.  */

  /* Memory allocation support routines in vec.cc.  */
  void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
  void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
  static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
  static unsigned calculate_allocation_1 (unsigned, unsigned);

  /* Note that vec_prefix should be a base class for vec, but we use
     offsetof() on vector fields of tree structures (e.g.,
     tree_binfo::base_binfos), and offsetof only supports base types.

     To compensate, we make vec_prefix a field inside vec and make
     vec a friend class of vec_prefix so it can access its fields.  */
  template <typename, typename, typename> friend struct vec;

  /* The allocator types also need access to our internals.  */
  friend struct va_gc;
  friend struct va_gc_atomic;
  friend struct va_heap;

  /* Number of slots allocated for elements.  */
  unsigned m_alloc : 31;
  /* Nonzero when the storage was not obtained from the allocator
     (presumably caller-supplied "auto" storage — confirm against the
     auto_vec machinery).  */
  unsigned m_using_auto_storage : 1;
  /* Number of slots currently in use.  */
  unsigned m_num;
};
219 | |||||
220 | /* Calculate the number of slots to reserve a vector, making sure that | ||||
221 | RESERVE slots are free. If EXACT grow exactly, otherwise grow | ||||
222 | exponentially. PFX is the control data for the vector. */ | ||||
223 | |||||
224 | inline unsigned | ||||
225 | vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve, | ||||
226 | bool exact) | ||||
227 | { | ||||
228 | if (exact) | ||||
229 | return (pfx ? pfx->m_num : 0) + reserve; | ||||
230 | else if (!pfx) | ||||
231 | return MAX (4, reserve)((4) > (reserve) ? (4) : (reserve)); | ||||
232 | return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve); | ||||
233 | } | ||||
234 | |||||
235 | template<typename, typename, typename> struct vec; | ||||
236 | |||||
237 | /* Valid vector layouts | ||||
238 | |||||
239 | vl_embed - Embeddable vector that uses the trailing array idiom. | ||||
240 | vl_ptr - Space efficient vector that uses a pointer to an | ||||
241 | embeddable vector. */ | ||||
struct vl_embed { };	/* Tag type: trailing-array (embedded) layout.  */
struct vl_ptr { };	/* Tag type: pointer-to-embedded-vector layout.  */
244 | |||||
245 | |||||
246 | /* Types of supported allocations | ||||
247 | |||||
248 | va_heap - Allocation uses malloc/free. | ||||
249 | va_gc - Allocation uses ggc_alloc. | ||||
250 | va_gc_atomic - Same as GC, but individual elements of the array | ||||
251 | do not need to be marked during collection. */ | ||||
252 | |||||
253 | /* Allocator type for heap vectors. */ | ||||
struct va_heap
{
  /* Heap vectors are frequently regular instances, so use the vl_ptr
     layout for them.  */
  typedef vl_ptr default_layout;

  /* Ensure V has at least RESERVE free slots, reallocating with the
     heap allocator as needed (defined below).  */
  template<typename T>
  static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
		       CXX_MEM_STAT_INFO);

  /* Free V's storage and null out the caller's pointer.  */
  template<typename T>
  static void release (vec<T, va_heap, vl_embed> *&);
};
267 | |||||
268 | |||||
269 | /* Allocator for heap memory. Ensure there are at least RESERVE free | ||||
270 | slots in V. If EXACT is true, grow exactly, else grow | ||||
271 | exponentially. As a special case, if the vector had not been | ||||
272 | allocated and RESERVE is 0, no vector will be created. */ | ||||
273 | |||||
template<typename T>
inline void
va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
		  MEM_STAT_DECL)
{
  size_t elt_size = sizeof (T);
  /* Work out how many slots the (possibly reallocated) vector should
     have.  V may be null for a not-yet-allocated vector.  */
  unsigned alloc
    = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
  gcc_checking_assert (alloc);

  /* Retire the statistics for the old allocation before it is
     reallocated away.  */
  if (GATHER_STATISTICS && v)
    v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
				  v->allocated (), false);

  size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
  /* Capture the element count before xrealloc invalidates V.  */
  unsigned nelem = v ? v->length () : 0;
  v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
  /* Reset the control data for the new capacity; the element bytes
     themselves were preserved by xrealloc.  */
  v->embedded_init (alloc, nelem);

  if (GATHER_STATISTICS)
    v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
}
296 | |||||
297 | |||||
298 | #if GCC_VERSION(4 * 1000 + 2) >= 4007 | ||||
299 | #pragma GCC diagnostic push | ||||
300 | #pragma GCC diagnostic ignored "-Wfree-nonheap-object" | ||||
301 | #endif | ||||
302 | |||||
303 | /* Free the heap space allocated for vector V. */ | ||||
304 | |||||
305 | template<typename T> | ||||
306 | void | ||||
307 | va_heap::release (vec<T, va_heap, vl_embed> *&v) | ||||
308 | { | ||||
309 | size_t elt_size = sizeof (T); | ||||
310 | if (v == NULLnullptr) | ||||
311 | return; | ||||
312 | |||||
313 | if (GATHER_STATISTICS0) | ||||
314 | v->m_vecpfx.release_overhead (v, elt_size * v->allocated (), | ||||
315 | v->allocated (), true); | ||||
316 | ::free (v); | ||||
317 | v = NULLnullptr; | ||||
318 | } | ||||
319 | |||||
320 | #if GCC_VERSION(4 * 1000 + 2) >= 4007 | ||||
321 | #pragma GCC diagnostic pop | ||||
322 | #endif | ||||
323 | |||||
324 | /* Allocator type for GC vectors. Notice that we need the structure | ||||
325 | declaration even if GC is not enabled. */ | ||||
326 | |||||
struct va_gc
{
  /* Use vl_embed as the default layout for GC vectors.  Due to GTY
     limitations, GC vectors must always be pointers, so it is more
     efficient to use a pointer to the vl_embed layout, rather than
     using a pointer to a pointer as would be the case with vl_ptr.  */
  typedef vl_embed default_layout;

  /* Ensure V has at least RESERVE free slots, reallocating with
     ggc_realloc as needed (defined below).  */
  template<typename T, typename A>
  static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
		       CXX_MEM_STAT_INFO);

  /* Free V's GC storage and null out the caller's pointer.  */
  template<typename T, typename A>
  static void release (vec<T, A, vl_embed> *&v);
};
342 | |||||
343 | |||||
344 | /* Free GC memory used by V and reset V to NULL. */ | ||||
345 | |||||
346 | template<typename T, typename A> | ||||
347 | inline void | ||||
348 | va_gc::release (vec<T, A, vl_embed> *&v) | ||||
349 | { | ||||
350 | if (v) | ||||
351 | ::ggc_free (v); | ||||
352 | v = NULLnullptr; | ||||
353 | } | ||||
354 | |||||
355 | |||||
356 | /* Allocator for GC memory. Ensure there are at least RESERVE free | ||||
357 | slots in V. If EXACT is true, grow exactly, else grow | ||||
358 | exponentially. As a special case, if the vector had not been | ||||
359 | allocated and RESERVE is 0, no vector will be created. */ | ||||
360 | |||||
template<typename T, typename A>
void
va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
		MEM_STAT_DECL)
{
  /* Work out how many slots are wanted; V may be null for a
     not-yet-allocated vector.  */
  unsigned alloc
    = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
  /* A zero result means RESERVE was 0 and the vector was never
     allocated: keep it unallocated.  */
  if (!alloc)
    {
      ::ggc_free (v);
      v = NULL;
      return;
    }

  /* Calculate the amount of space we want.  */
  size_t size = vec<T, A, vl_embed>::embedded_size (alloc);

  /* Ask the allocator how much space it will really give us.  */
  size = ::ggc_round_alloc_size (size);

  /* Adjust the number of slots accordingly, so that any space the
     allocator rounds up to is usable as extra elements.  */
  size_t vec_offset = sizeof (vec_prefix);
  size_t elt_size = sizeof (T);
  alloc = (size - vec_offset) / elt_size;

  /* And finally, recalculate the amount of space we ask for.  */
  size = vec_offset + alloc * elt_size;

  /* Capture the element count before ggc_realloc invalidates V.  */
  unsigned nelem = v ? v->length () : 0;
  v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
							   PASS_MEM_STAT));
  /* Reset the control data; element bytes were preserved by the
     reallocation.  */
  v->embedded_init (alloc, nelem);
}
394 | |||||
395 | |||||
/* Allocator type for GC vectors whose elements are atomic with respect
   to collection, so allocation and deallocation are completely
   inherited from va_gc.  */
struct va_gc_atomic : va_gc
{
  /* Intentionally empty: this type exists only as a distinct allocator
     tag so the GTY machinery can treat the elements as atomic;
     all allocation behavior comes from va_gc.  */
};
402 | |||||
403 | |||||
404 | /* Generic vector template. Default values for A and L indicate the | ||||
405 | most commonly used strategies. | ||||
406 | |||||
407 | FIXME - Ideally, they would all be vl_ptr to encourage using regular | ||||
408 | instances for vectors, but the existing GTY machinery is limited | ||||
409 | in that it can only deal with GC objects that are pointers | ||||
410 | themselves. | ||||
411 | |||||
412 | This means that vector operations that need to deal with | ||||
413 | potentially NULL pointers, must be provided as free | ||||
414 | functions (see the vec_safe_* functions above). */ | ||||
template<typename T,
	 typename A = va_heap,
	 typename L = typename A::default_layout>
struct GTY((user)) vec
{
  /* The primary template is intentionally empty; usable vectors come
     from partial specializations for the supported layouts (see the
     layout discussion above).  */
};
421 | |||||
422 | /* Allow C++11 range-based 'for' to work directly on vec<T>*. */ | ||||
423 | template<typename T, typename A, typename L> | ||||
424 | T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; } | ||||
425 | template<typename T, typename A, typename L> | ||||
426 | T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; } | ||||
427 | template<typename T, typename A, typename L> | ||||
428 | const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; } | ||||
429 | template<typename T, typename A, typename L> | ||||
430 | const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; } | ||||
431 | |||||
432 | /* Generic vec<> debug helpers. | ||||
433 | |||||
434 | These need to be instantiated for each vec<TYPE> used throughout | ||||
435 | the compiler like this: | ||||
436 | |||||
437 | DEFINE_DEBUG_VEC (TYPE) | ||||
438 | |||||
439 | The reason we have a debug_helper() is because GDB can't | ||||
440 | disambiguate a plain call to debug(some_vec), and it must be called | ||||
441 | like debug<TYPE>(some_vec). */ | ||||
442 | |||||
443 | template<typename T> | ||||
444 | void | ||||
445 | debug_helper (vec<T> &ref) | ||||
446 | { | ||||
447 | unsigned i; | ||||
448 | for (i = 0; i < ref.length (); ++i) | ||||
449 | { | ||||
450 | fprintf (stderrstderr, "[%d] = ", i); | ||||
451 | debug_slim (ref[i]); | ||||
452 | fputc ('\n', stderrstderr); | ||||
453 | } | ||||
454 | } | ||||
455 | |||||
456 | /* We need a separate va_gc variant here because default template | ||||
457 | argument for functions cannot be used in c++-98. Once this | ||||
458 | restriction is removed, those variant should be folded with the | ||||
459 | above debug_helper. */ | ||||
460 | |||||
461 | template<typename T> | ||||
462 | void | ||||
463 | debug_helper (vec<T, va_gc> &ref) | ||||
464 | { | ||||
465 | unsigned i; | ||||
466 | for (i = 0; i < ref.length (); ++i) | ||||
467 | { | ||||
468 | fprintf (stderrstderr, "[%d] = ", i); | ||||
469 | debug_slim (ref[i]); | ||||
470 | fputc ('\n', stderrstderr); | ||||
471 | } | ||||
472 | } | ||||
473 | |||||
474 | /* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper | ||||
475 | functions for a type T. */ | ||||
476 | |||||
/* Instantiates the debug helpers and defines four GDB-callable
   debug() overloads (reference and pointer, heap and GC) for T.
   The pointer overloads accept and report null vectors.  */
#define DEFINE_DEBUG_VEC(T) \
  template void debug_helper (vec<T> &);		\
  template void debug_helper (vec<T, va_gc> &);		\
  /* Define the vec<T> debug functions.  */		\
  DEBUG_FUNCTION void					\
  debug (vec<T> &ref)					\
  {							\
    debug_helper <T> (ref);				\
  }							\
  DEBUG_FUNCTION void					\
  debug (vec<T> *ptr)					\
  {							\
    if (ptr)						\
      debug (*ptr);					\
    else						\
      fprintf (stderr, "<nil>\n");			\
  }							\
  /* Define the vec<T, va_gc> debug functions.  */	\
  DEBUG_FUNCTION void					\
  debug (vec<T, va_gc> &ref)				\
  {							\
    debug_helper <T> (ref);				\
  }							\
  DEBUG_FUNCTION void					\
  debug (vec<T, va_gc> *ptr)				\
  {							\
    if (ptr)						\
      debug (*ptr);					\
    else						\
      fprintf (stderr, "<nil>\n");			\
  }
508 | |||||
509 | /* Default-construct N elements in DST. */ | ||||
510 | |||||
/* Value-initialize (default-construct) N objects of type T in the raw
   storage starting at DST, via placement new.

   On compilers affected by BROKEN_VALUE_INITIALIZATION (GCC < 4.4,
   which could leave parts of a value-initialized object untouched),
   the storage is first zeroed with memset so that members missed by
   the buggy value initialization still read as zero; a user-provided
   default constructor then runs on top of the zeroed bytes.  */

template <typename T>
inline void
vec_default_construct (T *dst, unsigned n)
{
#ifdef BROKEN_VALUE_INITIALIZATION
  /* Pre-clear as a workaround; see the rationale above.  Types whose
     non-static members have non-trivial default ctors that set
     non-zero values still need an explicit default ctor on T.  */
  memset (dst, '\0', sizeof (T) * n);
#endif
  for (T *slot = dst; n != 0; --n, ++slot)
    ::new (static_cast<void *> (slot)) T ();
}
533 | |||||
534 | /* Copy-construct N elements in DST from *SRC. */ | ||||
535 | |||||
/* Copy-construct N objects of type T into the raw storage at DST,
   taking the sources from SRC[0] .. SRC[N-1], via placement new.  */

template <typename T>
inline void
vec_copy_construct (T *dst, const T *src, unsigned n)
{
  while (n != 0)
    {
      ::new (static_cast<void *> (dst)) T (*src);
      ++dst;
      ++src;
      --n;
    }
}
543 | |||||
544 | /* Type to provide zero-initialized values for vec<T, A, L>. This is | ||||
545 | used to provide nil initializers for vec instances. Since vec must | ||||
546 | be a trivially copyable type that can be copied by memcpy and zeroed | ||||
547 | out by memset, it must have defaulted default and copy ctor and copy | ||||
548 | assignment. To initialize a vec either use value initialization | ||||
549 | (e.g., vec() or vec v{ };) or assign it the value vNULL. This isn't | ||||
550 | needed for file-scope and function-local static vectors, which are | ||||
551 | zero-initialized by default. */ | ||||
/* Empty tag type: assigning vNULL to a vec resets it to the nil state.  */
struct vnull { };
/* The single value of type vnull, usable as a nil initializer.  */
constexpr vnull vNULL{ };
554 | |||||
555 | |||||
556 | /* Embeddable vector. These vectors are suitable to be embedded | ||||
557 | in other data structures so that they can be pre-allocated in a | ||||
558 | contiguous memory block. | ||||
559 | |||||
560 | Embeddable vectors are implemented using the trailing array idiom, | ||||
561 | thus they are not resizeable without changing the address of the | ||||
562 | vector object itself. This means you cannot have variables or | ||||
563 | fields of embeddable vector type -- always use a pointer to a | ||||
564 | vector. The one exception is the final field of a structure, which | ||||
565 | could be a vector type. | ||||
566 | |||||
567 | You will have to use the embedded_size & embedded_init calls to | ||||
568 | create such objects, and they will not be resizeable (so the 'safe' | ||||
569 | allocation variants are not available). | ||||
570 | |||||
571 | Properties: | ||||
572 | |||||
573 | - The whole vector and control data are allocated in a single | ||||
574 | contiguous block. It uses the trailing-vector idiom, so | ||||
575 | allocation must reserve enough space for all the elements | ||||
576 | in the vector plus its control data. | ||||
577 | - The vector cannot be re-allocated. | ||||
578 | - The vector cannot grow nor shrink. | ||||
579 | - No indirections needed for access/manipulation. | ||||
580 | - It requires 2 words of storage (prior to vector allocation). */ | ||||
581 | |||||
582 | template<typename T, typename A> | ||||
583 | struct GTY((user)) vec<T, A, vl_embed> | ||||
584 | { | ||||
585 | public: | ||||
586 | unsigned allocated (void) const { return m_vecpfx.m_alloc; } | ||||
587 | unsigned length (void) const { return m_vecpfx.m_num; } | ||||
588 | bool is_empty (void) const { return m_vecpfx.m_num == 0; } | ||||
589 | T *address (void) { return reinterpret_cast <T *> (this + 1); } | ||||
590 | const T *address (void) const | ||||
591 | { return reinterpret_cast <const T *> (this + 1); } | ||||
592 | T *begin () { return address (); } | ||||
593 | const T *begin () const { return address (); } | ||||
594 | T *end () { return address () + length (); } | ||||
595 | const T *end () const { return address () + length (); } | ||||
596 | const T &operator[] (unsigned) const; | ||||
597 | T &operator[] (unsigned); | ||||
598 | T &last (void); | ||||
599 | bool space (unsigned) const; | ||||
600 | bool iterate (unsigned, T *) const; | ||||
601 | bool iterate (unsigned, T **) const; | ||||
602 | vec *copy (ALONE_CXX_MEM_STAT_INFO) const; | ||||
603 | void splice (const vec &); | ||||
604 | void splice (const vec *src); | ||||
605 | T *quick_push (const T &); | ||||
606 | T &pop (void); | ||||
607 | void truncate (unsigned); | ||||
608 | void quick_insert (unsigned, const T &); | ||||
609 | void ordered_remove (unsigned); | ||||
610 | void unordered_remove (unsigned); | ||||
611 | void block_remove (unsigned, unsigned); | ||||
612 | void qsort (int (*) (const void *, const void *))qsort (int (*) (const void *, const void *)); | ||||
613 | void sort (int (*) (const void *, const void *, void *), void *); | ||||
614 | void stablesort (int (*) (const void *, const void *, void *), void *); | ||||
615 | T *bsearch (const void *key, int (*compar) (const void *, const void *)); | ||||
616 | T *bsearch (const void *key, | ||||
617 | int (*compar)(const void *, const void *, void *), void *); | ||||
618 | unsigned lower_bound (const T &, bool (*) (const T &, const T &)) const; | ||||
619 | bool contains (const T &search) const; | ||||
620 | static size_t embedded_size (unsigned); | ||||
621 | void embedded_init (unsigned, unsigned = 0, unsigned = 0); | ||||
622 | void quick_grow (unsigned len); | ||||
623 | void quick_grow_cleared (unsigned len); | ||||
624 | |||||
625 | /* vec class can access our internal data and functions. */ | ||||
626 | template <typename, typename, typename> friend struct vec; | ||||
627 | |||||
628 | /* The allocator types also need access to our internals. */ | ||||
629 | friend struct va_gc; | ||||
630 | friend struct va_gc_atomic; | ||||
631 | friend struct va_heap; | ||||
632 | |||||
633 | /* FIXME - This field should be private, but we need to cater to | ||||
634 | compilers that have stricter notions of PODness for types. */ | ||||
635 | /* Align m_vecpfx to simplify address (). */ | ||||
636 | alignas (T) alignas (vec_prefix) vec_prefix m_vecpfx; | ||||
637 | }; | ||||
638 | |||||
639 | |||||
640 | /* Convenience wrapper functions to use when dealing with pointers to | ||||
641 | embedded vectors. Some functionality for these vectors must be | ||||
642 | provided via free functions for these reasons: | ||||
643 | |||||
644 | 1- The pointer may be NULL (e.g., before initial allocation). | ||||
645 | |||||
646 | 2- When the vector needs to grow, it must be reallocated, so | ||||
647 | the pointer will change its value. | ||||
648 | |||||
649 | Because of limitations with the current GC machinery, all vectors | ||||
650 | in GC memory *must* be pointers. */ | ||||
651 | |||||
652 | |||||
653 | /* If V contains no room for NELEMS elements, return false. Otherwise, | ||||
654 | return true. */ | ||||
655 | template<typename T, typename A> | ||||
656 | inline bool | ||||
657 | vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems) | ||||
658 | { | ||||
659 | return v ? v->space (nelems) : nelems == 0; | ||||
660 | } | ||||
661 | |||||
662 | |||||
663 | /* If V is NULL, return 0. Otherwise, return V->length(). */ | ||||
664 | template<typename T, typename A> | ||||
665 | inline unsigned | ||||
666 | vec_safe_length (const vec<T, A, vl_embed> *v) | ||||
667 | { | ||||
668 | return v ? v->length () : 0; | ||||
669 | } | ||||
670 | |||||
671 | |||||
672 | /* If V is NULL, return NULL. Otherwise, return V->address(). */ | ||||
673 | template<typename T, typename A> | ||||
674 | inline T * | ||||
675 | vec_safe_address (vec<T, A, vl_embed> *v) | ||||
676 | { | ||||
677 | return v ? v->address () : NULLnullptr; | ||||
678 | } | ||||
679 | |||||
680 | |||||
681 | /* If V is NULL, return true. Otherwise, return V->is_empty(). */ | ||||
682 | template<typename T, typename A> | ||||
683 | inline bool | ||||
684 | vec_safe_is_empty (vec<T, A, vl_embed> *v) | ||||
685 | { | ||||
686 | return v ? v->is_empty () : true; | ||||
687 | } | ||||
688 | |||||
689 | /* If V does not have space for NELEMS elements, call | ||||
690 | V->reserve(NELEMS, EXACT). */ | ||||
691 | template<typename T, typename A> | ||||
692 | inline bool | ||||
693 | vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false | ||||
694 | CXX_MEM_STAT_INFO) | ||||
695 | { | ||||
696 | bool extend = nelems ? !vec_safe_space (v, nelems) : false; | ||||
697 | if (extend
| ||||
698 | A::reserve (v, nelems, exact PASS_MEM_STAT); | ||||
699 | return extend; | ||||
700 | } | ||||
701 | |||||
/* Reserve space for exactly NELEMS more elements in V (no growth
   slack).  Thin wrapper over vec_safe_reserve with EXACT == true;
   returns true iff a reallocation happened.  */
template<typename T, typename A>
inline bool
vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
			CXX_MEM_STAT_INFO)
{
  return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
}
709 | |||||
710 | |||||
711 | /* Allocate GC memory for V with space for NELEMS slots. If NELEMS | ||||
712 | is 0, V is initialized to NULL. */ | ||||
713 | |||||
714 | template<typename T, typename A> | ||||
715 | inline void | ||||
716 | vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO) | ||||
717 | { | ||||
718 | v = NULLnullptr; | ||||
719 | vec_safe_reserve (v, nelems, false PASS_MEM_STAT); | ||||
720 | } | ||||
721 | |||||
722 | |||||
723 | /* Free the GC memory allocated by vector V and set it to NULL. */ | ||||
724 | |||||
template<typename T, typename A>
inline void
vec_free (vec<T, A, vl_embed> *&v)
{
  /* NOTE(review): V is passed by reference so the allocator's release
     hook can both free the storage and null out V — confirm against
     the A::release implementations.  */
  A::release (v);
}
731 | |||||
732 | |||||
733 | /* Grow V to length LEN. Allocate it, if necessary. */ | ||||
734 | template<typename T, typename A> | ||||
735 | inline void | ||||
736 | vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len, | ||||
737 | bool exact = false CXX_MEM_STAT_INFO) | ||||
738 | { | ||||
739 | unsigned oldlen = vec_safe_length (v); | ||||
740 | gcc_checking_assert (len >= oldlen)((void)(!(len >= oldlen) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 740, __FUNCTION__), 0 : 0)); | ||||
741 | vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT); | ||||
742 | v->quick_grow (len); | ||||
743 | } | ||||
744 | |||||
745 | |||||
746 | /* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */ | ||||
/* If V is NULL, allocate it.  Call V->safe_grow_cleared(LEN).  */
template<typename T, typename A>
inline void
vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
		       bool exact = false CXX_MEM_STAT_INFO)
{
  unsigned oldlen = vec_safe_length (v);
  /* Grow first (may allocate V), then default-construct the new
     tail [oldlen, len).  NOTE(review): if LEN == 0 and V started out
     NULL, vec_safe_grow leaves V NULL and v->address () below
     dereferences a null pointer — this matches the analyzer warning
     reported against this header; confirm no caller passes LEN == 0
     with a NULL vector.  */
  vec_safe_grow (v, len, exact PASS_MEM_STAT);
  vec_default_construct (v->address () + oldlen, len - oldlen);
}
756 | |||||
757 | |||||
758 | /* Assume V is not NULL. */ | ||||
759 | |||||
/* Space-pointer (vl_ptr) overload: V must not be NULL here; simply
   forward to the member safe_grow_cleared.  */
template<typename T>
inline void
vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
		       unsigned len, bool exact = false CXX_MEM_STAT_INFO)
{
  v->safe_grow_cleared (len, exact PASS_MEM_STAT);
}
767 | |||||
768 | /* If V does not have space for NELEMS elements, call | ||||
769 | V->reserve(NELEMS, EXACT). */ | ||||
770 | |||||
/* Space-pointer (vl_ptr) overload of vec_safe_reserve: V must not be
   NULL; forward to the member reserve.  NOTE(review): unlike the
   vl_embed overload, PASS_MEM_STAT is not forwarded here — confirm
   whether that is intentional for memory-stat builds.  */
template<typename T>
inline bool
vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
		  CXX_MEM_STAT_INFO)
{
  return v->reserve (nelems, exact);
}
778 | |||||
779 | |||||
780 | /* If V is NULL return false, otherwise return V->iterate(IX, PTR). */ | ||||
781 | template<typename T, typename A> | ||||
782 | inline bool | ||||
783 | vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr) | ||||
784 | { | ||||
785 | if (v) | ||||
786 | return v->iterate (ix, ptr); | ||||
787 | else | ||||
788 | { | ||||
789 | *ptr = 0; | ||||
790 | return false; | ||||
791 | } | ||||
792 | } | ||||
793 | |||||
794 | template<typename T, typename A> | ||||
795 | inline bool | ||||
796 | vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr) | ||||
797 | { | ||||
798 | if (v) | ||||
799 | return v->iterate (ix, ptr); | ||||
800 | else | ||||
801 | { | ||||
802 | *ptr = 0; | ||||
803 | return false; | ||||
804 | } | ||||
805 | } | ||||
806 | |||||
807 | |||||
808 | /* If V has no room for one more element, reallocate it. Then call | ||||
809 | V->quick_push(OBJ). */ | ||||
/* If V has no room for one more element, reallocate it.  Then call
   V->quick_push(OBJ).  Returns a pointer to the pushed slot; the
   reserve guarantees V is non-NULL afterwards.  */
template<typename T, typename A>
inline T *
vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
{
  vec_safe_reserve (v, 1, false PASS_MEM_STAT);
  return v->quick_push (obj);
}
817 | |||||
818 | |||||
819 | /* if V has no room for one more element, reallocate it. Then call | ||||
820 | V->quick_insert(IX, OBJ). */ | ||||
/* if V has no room for one more element, reallocate it.  Then call
   V->quick_insert(IX, OBJ).  The reserve guarantees V is non-NULL for
   the insert.  */
template<typename T, typename A>
inline void
vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
		 CXX_MEM_STAT_INFO)
{
  vec_safe_reserve (v, 1, false PASS_MEM_STAT);
  v->quick_insert (ix, obj);
}
829 | |||||
830 | |||||
831 | /* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */ | ||||
/* If V is NULL, do nothing.  Otherwise, call V->truncate(SIZE).
   SIZE must not exceed the current length (checked in truncate).  */
template<typename T, typename A>
inline void
vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
{
  if (v)
    v->truncate (size);
}
839 | |||||
840 | |||||
841 | /* If SRC is not NULL, return a pointer to a copy of it. */ | ||||
842 | template<typename T, typename A> | ||||
843 | inline vec<T, A, vl_embed> * | ||||
844 | vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO) | ||||
845 | { | ||||
846 | return src ? src->copy (ALONE_PASS_MEM_STAT) : NULLnullptr; | ||||
847 | } | ||||
848 | |||||
849 | /* Copy the elements from SRC to the end of DST as if by memcpy. | ||||
850 | Reallocate DST, if necessary. */ | ||||
851 | template<typename T, typename A> | ||||
852 | inline void | ||||
853 | vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src | ||||
854 | CXX_MEM_STAT_INFO) | ||||
855 | { | ||||
856 | unsigned src_len = vec_safe_length (src); | ||||
857 | if (src_len) | ||||
858 | { | ||||
859 | vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len | ||||
860 | PASS_MEM_STAT); | ||||
861 | dst->splice (*src); | ||||
862 | } | ||||
863 | } | ||||
864 | |||||
865 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
866 | size of the vector and so should be used with care. */ | ||||
867 | |||||
868 | template<typename T, typename A> | ||||
869 | inline bool | ||||
870 | vec_safe_contains (vec<T, A, vl_embed> *v, const T &search) | ||||
871 | { | ||||
872 | return v ? v->contains (search) : false; | ||||
873 | } | ||||
874 | |||||
875 | /* Index into vector. Return the IX'th element. IX must be in the | ||||
876 | domain of the vector. */ | ||||
877 | |||||
878 | template<typename T, typename A> | ||||
879 | inline const T & | ||||
880 | vec<T, A, vl_embed>::operator[] (unsigned ix) const | ||||
881 | { | ||||
882 | gcc_checking_assert (ix < m_vecpfx.m_num)((void)(!(ix < m_vecpfx.m_num) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 882, __FUNCTION__), 0 : 0)); | ||||
883 | return address ()[ix]; | ||||
884 | } | ||||
885 | |||||
886 | template<typename T, typename A> | ||||
887 | inline T & | ||||
888 | vec<T, A, vl_embed>::operator[] (unsigned ix) | ||||
889 | { | ||||
890 | gcc_checking_assert (ix < m_vecpfx.m_num)((void)(!(ix < m_vecpfx.m_num) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 890, __FUNCTION__), 0 : 0)); | ||||
891 | return address ()[ix]; | ||||
892 | } | ||||
893 | |||||
894 | |||||
895 | /* Get the final element of the vector, which must not be empty. */ | ||||
896 | |||||
897 | template<typename T, typename A> | ||||
898 | inline T & | ||||
899 | vec<T, A, vl_embed>::last (void) | ||||
900 | { | ||||
901 | gcc_checking_assert (m_vecpfx.m_num > 0)((void)(!(m_vecpfx.m_num > 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 901, __FUNCTION__), 0 : 0)); | ||||
902 | return (*this)[m_vecpfx.m_num - 1]; | ||||
903 | } | ||||
904 | |||||
905 | |||||
906 | /* If this vector has space for NELEMS additional entries, return | ||||
907 | true. You usually only need to use this if you are doing your | ||||
908 | own vector reallocation, for instance on an embedded vector. This | ||||
909 | returns true in exactly the same circumstances that vec::reserve | ||||
910 | will. */ | ||||
911 | |||||
912 | template<typename T, typename A> | ||||
913 | inline bool | ||||
914 | vec<T, A, vl_embed>::space (unsigned nelems) const | ||||
915 | { | ||||
916 | return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems; | ||||
917 | } | ||||
918 | |||||
919 | |||||
920 | /* Return iteration condition and update *PTR to (a copy of) the IX'th | ||||
921 | element of this vector. Use this to iterate over the elements of a | ||||
922 | vector as follows, | ||||
923 | |||||
924 | for (ix = 0; v->iterate (ix, &val); ix++) | ||||
925 | continue; */ | ||||
926 | |||||
927 | template<typename T, typename A> | ||||
928 | inline bool | ||||
929 | vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const | ||||
930 | { | ||||
931 | if (ix < m_vecpfx.m_num) | ||||
932 | { | ||||
933 | *ptr = address ()[ix]; | ||||
934 | return true; | ||||
935 | } | ||||
936 | else | ||||
937 | { | ||||
938 | *ptr = 0; | ||||
939 | return false; | ||||
940 | } | ||||
941 | } | ||||
942 | |||||
943 | |||||
944 | /* Return iteration condition and update *PTR to point to the | ||||
945 | IX'th element of this vector. Use this to iterate over the | ||||
946 | elements of a vector as follows, | ||||
947 | |||||
948 | for (ix = 0; v->iterate (ix, &ptr); ix++) | ||||
949 | continue; | ||||
950 | |||||
951 | This variant is for vectors of objects. */ | ||||
952 | |||||
953 | template<typename T, typename A> | ||||
954 | inline bool | ||||
955 | vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const | ||||
956 | { | ||||
957 | if (ix < m_vecpfx.m_num) | ||||
958 | { | ||||
959 | *ptr = CONST_CAST (T *, &address ()[ix])(const_cast<T *> ((&address ()[ix]))); | ||||
960 | return true; | ||||
961 | } | ||||
962 | else | ||||
963 | { | ||||
964 | *ptr = 0; | ||||
965 | return false; | ||||
966 | } | ||||
967 | } | ||||
968 | |||||
969 | |||||
970 | /* Return a pointer to a copy of this vector. */ | ||||
971 | |||||
972 | template<typename T, typename A> | ||||
973 | inline vec<T, A, vl_embed> * | ||||
974 | vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECLvoid) const | ||||
975 | { | ||||
976 | vec<T, A, vl_embed> *new_vec = NULLnullptr; | ||||
977 | unsigned len = length (); | ||||
978 | if (len) | ||||
979 | { | ||||
980 | vec_alloc (new_vec, len PASS_MEM_STAT); | ||||
981 | new_vec->embedded_init (len, len); | ||||
982 | vec_copy_construct (new_vec->address (), address (), len); | ||||
983 | } | ||||
984 | return new_vec; | ||||
985 | } | ||||
986 | |||||
987 | |||||
988 | /* Copy the elements from SRC to the end of this vector as if by memcpy. | ||||
989 | The vector must have sufficient headroom available. */ | ||||
990 | |||||
991 | template<typename T, typename A> | ||||
992 | inline void | ||||
993 | vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src) | ||||
994 | { | ||||
995 | unsigned len = src.length (); | ||||
996 | if (len) | ||||
997 | { | ||||
998 | gcc_checking_assert (space (len))((void)(!(space (len)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 998, __FUNCTION__), 0 : 0)); | ||||
999 | vec_copy_construct (end (), src.address (), len); | ||||
1000 | m_vecpfx.m_num += len; | ||||
1001 | } | ||||
1002 | } | ||||
1003 | |||||
/* Pointer convenience overload: a NULL SRC is a no-op, otherwise
   forward to the by-reference splice above.  */
template<typename T, typename A>
inline void
vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
{
  if (src)
    splice (*src);
}
1011 | |||||
1012 | |||||
1013 | /* Push OBJ (a new element) onto the end of the vector. There must be | ||||
1014 | sufficient space in the vector. Return a pointer to the slot | ||||
1015 | where OBJ was inserted. */ | ||||
1016 | |||||
1017 | template<typename T, typename A> | ||||
1018 | inline T * | ||||
1019 | vec<T, A, vl_embed>::quick_push (const T &obj) | ||||
1020 | { | ||||
1021 | gcc_checking_assert (space (1))((void)(!(space (1)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1021, __FUNCTION__), 0 : 0)); | ||||
1022 | T *slot = &address ()[m_vecpfx.m_num++]; | ||||
1023 | *slot = obj; | ||||
1024 | return slot; | ||||
1025 | } | ||||
1026 | |||||
1027 | |||||
1028 | /* Pop and return the last element off the end of the vector. */ | ||||
1029 | |||||
1030 | template<typename T, typename A> | ||||
1031 | inline T & | ||||
1032 | vec<T, A, vl_embed>::pop (void) | ||||
1033 | { | ||||
1034 | gcc_checking_assert (length () > 0)((void)(!(length () > 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1034, __FUNCTION__), 0 : 0)); | ||||
1035 | return address ()[--m_vecpfx.m_num]; | ||||
1036 | } | ||||
1037 | |||||
1038 | |||||
1039 | /* Set the length of the vector to SIZE. The new length must be less | ||||
1040 | than or equal to the current length. This is an O(1) operation. */ | ||||
1041 | |||||
1042 | template<typename T, typename A> | ||||
1043 | inline void | ||||
1044 | vec<T, A, vl_embed>::truncate (unsigned size) | ||||
1045 | { | ||||
1046 | gcc_checking_assert (length () >= size)((void)(!(length () >= size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1046, __FUNCTION__), 0 : 0)); | ||||
1047 | m_vecpfx.m_num = size; | ||||
1048 | } | ||||
1049 | |||||
1050 | |||||
1051 | /* Insert an element, OBJ, at the IXth position of this vector. There | ||||
1052 | must be sufficient space. */ | ||||
1053 | |||||
1054 | template<typename T, typename A> | ||||
1055 | inline void | ||||
1056 | vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj) | ||||
1057 | { | ||||
1058 | gcc_checking_assert (length () < allocated ())((void)(!(length () < allocated ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1058, __FUNCTION__), 0 : 0)); | ||||
1059 | gcc_checking_assert (ix <= length ())((void)(!(ix <= length ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1059, __FUNCTION__), 0 : 0)); | ||||
1060 | T *slot = &address ()[ix]; | ||||
1061 | memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T)); | ||||
1062 | *slot = obj; | ||||
1063 | } | ||||
1064 | |||||
1065 | |||||
1066 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
1067 | remaining elements is preserved. This is an O(N) operation due to | ||||
1068 | memmove. */ | ||||
1069 | |||||
1070 | template<typename T, typename A> | ||||
1071 | inline void | ||||
1072 | vec<T, A, vl_embed>::ordered_remove (unsigned ix) | ||||
1073 | { | ||||
1074 | gcc_checking_assert (ix < length ())((void)(!(ix < length ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1074, __FUNCTION__), 0 : 0)); | ||||
1075 | T *slot = &address ()[ix]; | ||||
1076 | memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T)); | ||||
1077 | } | ||||
1078 | |||||
1079 | |||||
1080 | /* Remove elements in [START, END) from VEC for which COND holds. Ordering of | ||||
1081 | remaining elements is preserved. This is an O(N) operation. */ | ||||
1082 | |||||
/* Remove elements in [START, END) from VEC for which COND holds.
   Ordering of remaining elements is preserved.  This is an O(N)
   operation: kept elements are compacted toward the front and the
   freed tail is dropped with a single block_remove.
   Fix: the macro's expansion had been fused into the definition line;
   restore the clean macro.  */

#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index,	\
				      elem_ptr, start, end, cond)	\
  {									\
    gcc_assert ((end) <= (vec).length ());				\
    for (read_index = write_index = (start); read_index < (end);	\
	 ++read_index)							\
      {									\
	elem_ptr = &(vec)[read_index];					\
	bool remove_p = (cond);						\
	if (remove_p)							\
	  continue;							\
									\
	if (read_index != write_index)					\
	  (vec)[write_index] = (vec)[read_index];			\
									\
	write_index++;							\
      }									\
									\
    if (read_index - write_index > 0)					\
      (vec).block_remove (write_index, read_index - write_index);	\
  }
1104 | |||||
1105 | |||||
1106 | /* Remove elements from VEC for which COND holds. Ordering of remaining | ||||
1107 | elements is preserved. This is an O(N) operation. */ | ||||
1108 | |||||
/* Remove elements from VEC for which COND holds.  Ordering of
   remaining elements is preserved.  This is an O(N) operation —
   whole-vector shorthand for VEC_ORDERED_REMOVE_IF_FROM_TO.
   Fix: the macro's expansion had been fused into the definition line;
   restore the clean macro.  */

#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr,	\
			      cond)					\
  VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index,	\
				 elem_ptr, 0, (vec).length (), (cond))
1113 | |||||
1114 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
1115 | remaining elements is destroyed. This is an O(1) operation. */ | ||||
1116 | |||||
1117 | template<typename T, typename A> | ||||
1118 | inline void | ||||
1119 | vec<T, A, vl_embed>::unordered_remove (unsigned ix) | ||||
1120 | { | ||||
1121 | gcc_checking_assert (ix < length ())((void)(!(ix < length ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1121, __FUNCTION__), 0 : 0)); | ||||
1122 | T *p = address (); | ||||
1123 | p[ix] = p[--m_vecpfx.m_num]; | ||||
1124 | } | ||||
1125 | |||||
1126 | |||||
1127 | /* Remove LEN elements starting at the IXth. Ordering is retained. | ||||
1128 | This is an O(N) operation due to memmove. */ | ||||
1129 | |||||
1130 | template<typename T, typename A> | ||||
1131 | inline void | ||||
1132 | vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len) | ||||
1133 | { | ||||
1134 | gcc_checking_assert (ix + len <= length ())((void)(!(ix + len <= length ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1134, __FUNCTION__), 0 : 0)); | ||||
1135 | T *slot = &address ()[ix]; | ||||
1136 | m_vecpfx.m_num -= len; | ||||
1137 | memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T)); | ||||
1138 | } | ||||
1139 | |||||
1140 | |||||
1141 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
1142 | function to pass to qsort. */ | ||||
1143 | |||||
1144 | template<typename T, typename A> | ||||
1145 | inline void | ||||
1146 | vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))qsort (int (*cmp) (const void *, const void *)) | ||||
1147 | { | ||||
1148 | if (length () > 1) | ||||
1149 | gcc_qsort (address (), length (), sizeof (T), cmp); | ||||
1150 | } | ||||
1151 | |||||
1152 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
1153 | function to pass to qsort. */ | ||||
1154 | |||||
/* Sort the contents of this vector with qsort.  CMP is the comparison
   function to pass to qsort; DATA is passed through to CMP as its
   third argument.  Fewer than two elements is a no-op.  */

template<typename T, typename A>
inline void
vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
			   void *data)
{
  if (length () > 1)
    gcc_sort_r (address (), length (), sizeof (T), cmp, data);
}
1163 | |||||
1164 | /* Sort the contents of this vector with gcc_stablesort_r. CMP is the | ||||
1165 | comparison function to pass to qsort. */ | ||||
1166 | |||||
1167 | template<typename T, typename A> | ||||
1168 | inline void | ||||
1169 | vec<T, A, vl_embed>::stablesort (int (*cmp) (const void *, const void *, | ||||
1170 | void *), void *data) | ||||
1171 | { | ||||
1172 | if (length () > 1) | ||||
1173 | gcc_stablesort_r (address (), length (), sizeof (T), cmp, data); | ||||
1174 | } | ||||
1175 | |||||
1176 | /* Search the contents of the sorted vector with a binary search. | ||||
1177 | CMP is the comparison function to pass to bsearch. */ | ||||
1178 | |||||
1179 | template<typename T, typename A> | ||||
1180 | inline T * | ||||
1181 | vec<T, A, vl_embed>::bsearch (const void *key, | ||||
1182 | int (*compar) (const void *, const void *)) | ||||
1183 | { | ||||
1184 | const void *base = this->address (); | ||||
1185 | size_t nmemb = this->length (); | ||||
1186 | size_t size = sizeof (T); | ||||
1187 | /* The following is a copy of glibc stdlib-bsearch.h. */ | ||||
1188 | size_t l, u, idx; | ||||
1189 | const void *p; | ||||
1190 | int comparison; | ||||
1191 | |||||
1192 | l = 0; | ||||
1193 | u = nmemb; | ||||
1194 | while (l < u) | ||||
1195 | { | ||||
1196 | idx = (l + u) / 2; | ||||
1197 | p = (const void *) (((const char *) base) + (idx * size)); | ||||
1198 | comparison = (*compar) (key, p); | ||||
1199 | if (comparison < 0) | ||||
1200 | u = idx; | ||||
1201 | else if (comparison > 0) | ||||
1202 | l = idx + 1; | ||||
1203 | else | ||||
1204 | return (T *)const_cast<void *>(p); | ||||
1205 | } | ||||
1206 | |||||
1207 | return NULLnullptr; | ||||
1208 | } | ||||
1209 | |||||
1210 | /* Search the contents of the sorted vector with a binary search. | ||||
1211 | CMP is the comparison function to pass to bsearch. */ | ||||
1212 | |||||
1213 | template<typename T, typename A> | ||||
1214 | inline T * | ||||
1215 | vec<T, A, vl_embed>::bsearch (const void *key, | ||||
1216 | int (*compar) (const void *, const void *, | ||||
1217 | void *), void *data) | ||||
1218 | { | ||||
1219 | const void *base = this->address (); | ||||
1220 | size_t nmemb = this->length (); | ||||
1221 | size_t size = sizeof (T); | ||||
1222 | /* The following is a copy of glibc stdlib-bsearch.h. */ | ||||
1223 | size_t l, u, idx; | ||||
1224 | const void *p; | ||||
1225 | int comparison; | ||||
1226 | |||||
1227 | l = 0; | ||||
1228 | u = nmemb; | ||||
1229 | while (l < u) | ||||
1230 | { | ||||
1231 | idx = (l + u) / 2; | ||||
1232 | p = (const void *) (((const char *) base) + (idx * size)); | ||||
1233 | comparison = (*compar) (key, p, data); | ||||
1234 | if (comparison < 0) | ||||
1235 | u = idx; | ||||
1236 | else if (comparison > 0) | ||||
1237 | l = idx + 1; | ||||
1238 | else | ||||
1239 | return (T *)const_cast<void *>(p); | ||||
1240 | } | ||||
1241 | |||||
1242 | return NULLnullptr; | ||||
1243 | } | ||||
1244 | |||||
1245 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
1246 | size of the vector and so should be used with care. */ | ||||
1247 | |||||
1248 | template<typename T, typename A> | ||||
1249 | inline bool | ||||
1250 | vec<T, A, vl_embed>::contains (const T &search) const | ||||
1251 | { | ||||
1252 | unsigned int len = length (); | ||||
1253 | const T *p = address (); | ||||
1254 | for (unsigned int i = 0; i < len; i++) | ||||
1255 | { | ||||
1256 | const T *slot = &p[i]; | ||||
1257 | if (*slot == search) | ||||
1258 | return true; | ||||
1259 | } | ||||
1260 | |||||
1261 | return false; | ||||
1262 | } | ||||
1263 | |||||
1264 | /* Find and return the first position in which OBJ could be inserted | ||||
1265 | without changing the ordering of this vector. LESSTHAN is a | ||||
1266 | function that returns true if the first argument is strictly less | ||||
1267 | than the second. */ | ||||
1268 | |||||
1269 | template<typename T, typename A> | ||||
1270 | unsigned | ||||
1271 | vec<T, A, vl_embed>::lower_bound (const T &obj, | ||||
1272 | bool (*lessthan)(const T &, const T &)) | ||||
1273 | const | ||||
1274 | { | ||||
1275 | unsigned int len = length (); | ||||
1276 | unsigned int half, middle; | ||||
1277 | unsigned int first = 0; | ||||
1278 | while (len > 0) | ||||
1279 | { | ||||
1280 | half = len / 2; | ||||
1281 | middle = first; | ||||
1282 | middle += half; | ||||
1283 | const T &middle_elem = address ()[middle]; | ||||
1284 | if (lessthan (middle_elem, obj)) | ||||
1285 | { | ||||
1286 | first = middle; | ||||
1287 | ++first; | ||||
1288 | len = len - half - 1; | ||||
1289 | } | ||||
1290 | else | ||||
1291 | len = half; | ||||
1292 | } | ||||
1293 | return first; | ||||
1294 | } | ||||
1295 | |||||
1296 | |||||
1297 | /* Return the number of bytes needed to embed an instance of an | ||||
1298 | embeddable vec inside another data structure. | ||||
1299 | |||||
1300 | Use these methods to determine the required size and initialization | ||||
1301 | of a vector V of type T embedded within another structure (as the | ||||
1302 | final member): | ||||
1303 | |||||
1304 | size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc); | ||||
1305 | void v->embedded_init (unsigned alloc, unsigned num); | ||||
1306 | |||||
1307 | These allow the caller to perform the memory allocation. */ | ||||
1308 | |||||
1309 | template<typename T, typename A> | ||||
1310 | inline size_t | ||||
1311 | vec<T, A, vl_embed>::embedded_size (unsigned alloc) | ||||
1312 | { | ||||
1313 | struct alignas (T) U { char data[sizeof (T)]; }; | ||||
1314 | typedef vec<U, A, vl_embed> vec_embedded; | ||||
1315 | typedef typename std::conditional<std::is_standard_layout<T>::value, | ||||
1316 | vec, vec_embedded>::type vec_stdlayout; | ||||
1317 | static_assert (sizeof (vec_stdlayout) == sizeof (vec), ""); | ||||
1318 | static_assert (alignof (vec_stdlayout) == alignof (vec), ""); | ||||
1319 | return sizeof (vec_stdlayout) + alloc * sizeof (T); | ||||
1320 | } | ||||
1321 | |||||
1322 | |||||
1323 | /* Initialize the vector to contain room for ALLOC elements and | ||||
1324 | NUM active elements. */ | ||||
1325 | |||||
1326 | template<typename T, typename A> | ||||
1327 | inline void | ||||
1328 | vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut) | ||||
1329 | { | ||||
1330 | m_vecpfx.m_alloc = alloc; | ||||
1331 | m_vecpfx.m_using_auto_storage = aut; | ||||
1332 | m_vecpfx.m_num = num; | ||||
1333 | } | ||||
1334 | |||||
1335 | |||||
1336 | /* Grow the vector to a specific length. LEN must be as long or longer than | ||||
1337 | the current length. The new elements are uninitialized. */ | ||||
1338 | |||||
1339 | template<typename T, typename A> | ||||
1340 | inline void | ||||
1341 | vec<T, A, vl_embed>::quick_grow (unsigned len) | ||||
1342 | { | ||||
1343 | gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc)((void)(!(length () <= len && len <= m_vecpfx.m_alloc ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h" , 1343, __FUNCTION__), 0 : 0)); | ||||
1344 | m_vecpfx.m_num = len; | ||||
1345 | } | ||||
1346 | |||||
1347 | |||||
1348 | /* Grow the vector to a specific length. LEN must be as long or longer than | ||||
1349 | the current length. The new elements are initialized to zero. */ | ||||
1350 | |||||
1351 | template<typename T, typename A> | ||||
1352 | inline void | ||||
1353 | vec<T, A, vl_embed>::quick_grow_cleared (unsigned len) | ||||
1354 | { | ||||
1355 | unsigned oldlen = length (); | ||||
1356 | size_t growby = len - oldlen; | ||||
1357 | quick_grow (len); | ||||
1358 | if (growby != 0) | ||||
1359 | vec_default_construct (address () + oldlen, growby); | ||||
1360 | } | ||||
1361 | |||||
1362 | /* Garbage collection support for vec<T, A, vl_embed>. */ | ||||
1363 | |||||
1364 | template<typename T> | ||||
1365 | void | ||||
1366 | gt_ggc_mx (vec<T, va_gc> *v) | ||||
1367 | { | ||||
1368 | extern void gt_ggc_mx (T &); | ||||
1369 | for (unsigned i = 0; i < v->length (); i++) | ||||
1370 | gt_ggc_mx ((*v)[i]); | ||||
1371 | } | ||||
1372 | |||||
1373 | template<typename T> | ||||
1374 | void | ||||
1375 | gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED__attribute__ ((__unused__))) | ||||
1376 | { | ||||
1377 | /* Nothing to do. Vectors of atomic types wrt GC do not need to | ||||
1378 | be traversed. */ | ||||
1379 | } | ||||
1380 | |||||
1381 | |||||
1382 | /* PCH support for vec<T, A, vl_embed>. */ | ||||
1383 | |||||
1384 | template<typename T, typename A> | ||||
1385 | void | ||||
1386 | gt_pch_nx (vec<T, A, vl_embed> *v) | ||||
1387 | { | ||||
1388 | extern void gt_pch_nx (T &); | ||||
1389 | for (unsigned i = 0; i < v->length (); i++) | ||||
1390 | gt_pch_nx ((*v)[i]); | ||||
1391 | } | ||||
1392 | |||||
1393 | template<typename T, typename A> | ||||
1394 | void | ||||
1395 | gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie) | ||||
1396 | { | ||||
1397 | for (unsigned i = 0; i < v->length (); i++) | ||||
1398 | op (&((*v)[i]), NULLnullptr, cookie); | ||||
1399 | } | ||||
1400 | |||||
1401 | template<typename T, typename A> | ||||
1402 | void | ||||
1403 | gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie) | ||||
1404 | { | ||||
1405 | extern void gt_pch_nx (T *, gt_pointer_operator, void *); | ||||
1406 | for (unsigned i = 0; i < v->length (); i++) | ||||
1407 | gt_pch_nx (&((*v)[i]), op, cookie); | ||||
1408 | } | ||||
1409 | |||||
1410 | |||||
1411 | /* Space efficient vector. These vectors can grow dynamically and are | ||||
1412 | allocated together with their control data. They are suited to be | ||||
1413 | included in data structures. Prior to initial allocation, they | ||||
1414 | only take a single word of storage. | ||||
1415 | |||||
1416 | These vectors are implemented as a pointer to an embeddable vector. | ||||
1417 | The semantics allow for this pointer to be NULL to represent empty | ||||
1418 | vectors. This way, empty vectors occupy minimal space in the | ||||
1419 | structure containing them. | ||||
1420 | |||||
1421 | Properties: | ||||
1422 | |||||
1423 | - The whole vector and control data are allocated in a single | ||||
1424 | contiguous block. | ||||
1425 | - The whole vector may be re-allocated. | ||||
1426 | - Vector data may grow and shrink. | ||||
1427 | - Access and manipulation requires a pointer test and | ||||
1428 | indirection. | ||||
1429 | - It requires 1 word of storage (prior to vector allocation). | ||||
1430 | |||||
1431 | |||||
1432 | Limitations: | ||||
1433 | |||||
1434 | These vectors must be PODs because they are stored in unions. | ||||
1435 | (http://en.wikipedia.org/wiki/Plain_old_data_structures). | ||||
1436 | As long as we use C++03, we cannot have constructors nor | ||||
1437 | destructors in classes that are stored in unions. */ | ||||
1438 | |||||
1439 | template<typename T, size_t N = 0> | ||||
1440 | class auto_vec; | ||||
1441 | |||||
1442 | template<typename T> | ||||
1443 | struct vec<T, va_heap, vl_ptr> | ||||
1444 | { | ||||
1445 | public: | ||||
1446 | /* Default ctors to ensure triviality. Use value-initialization | ||||
1447 | (e.g., vec() or vec v{ };) or vNULL to create a zero-initialized | ||||
1448 | instance. */ | ||||
1449 | vec () = default; | ||||
1450 | vec (const vec &) = default; | ||||
1451 | /* Initialization from the generic vNULL. */ | ||||
1452 | vec (vnull): m_vec () { } | ||||
1453 | /* Same as default ctor: vec storage must be released manually. */ | ||||
1454 | ~vec () = default; | ||||
1455 | |||||
1456 | /* Defaulted same as copy ctor. */ | ||||
1457 | vec& operator= (const vec &) = default; | ||||
1458 | |||||
1459 | /* Prevent implicit conversion from auto_vec. Use auto_vec::to_vec() | ||||
1460 | instead. */ | ||||
1461 | template <size_t N> | ||||
1462 | vec (auto_vec<T, N> &) = delete; | ||||
1463 | |||||
1464 | template <size_t N> | ||||
1465 | void operator= (auto_vec<T, N> &) = delete; | ||||
1466 | |||||
1467 | /* Memory allocation and deallocation for the embedded vector. | ||||
1468 | Needed because we cannot have proper ctors/dtors defined. */ | ||||
1469 | void create (unsigned nelems CXX_MEM_STAT_INFO); | ||||
1470 | void release (void); | ||||
1471 | |||||
1472 | /* Vector operations. */ | ||||
1473 | bool exists (void) const | ||||
1474 | { return m_vec != NULLnullptr; } | ||||
1475 | |||||
1476 | bool is_empty (void) const | ||||
1477 | { return m_vec ? m_vec->is_empty () : true; } | ||||
1478 | |||||
1479 | unsigned allocated (void) const | ||||
1480 | { return m_vec ? m_vec->allocated () : 0; } | ||||
1481 | |||||
1482 | unsigned length (void) const | ||||
1483 | { return m_vec ? m_vec->length () : 0; } | ||||
1484 | |||||
1485 | T *address (void) | ||||
1486 | { return m_vec ? m_vec->address () : NULLnullptr; } | ||||
1487 | |||||
1488 | const T *address (void) const | ||||
1489 | { return m_vec ? m_vec->address () : NULLnullptr; } | ||||
1490 | |||||
1491 | T *begin () { return address (); } | ||||
1492 | const T *begin () const { return address (); } | ||||
1493 | T *end () { return begin () + length (); } | ||||
1494 | const T *end () const { return begin () + length (); } | ||||
1495 | const T &operator[] (unsigned ix) const | ||||
1496 | { return (*m_vec)[ix]; } | ||||
1497 | |||||
1498 | bool operator!=(const vec &other) const | ||||
1499 | { return !(*this == other); } | ||||
1500 | |||||
1501 | bool operator==(const vec &other) const | ||||
1502 | { return address () == other.address (); } | ||||
1503 | |||||
1504 | T &operator[] (unsigned ix) | ||||
1505 | { return (*m_vec)[ix]; } | ||||
1506 | |||||
1507 | T &last (void) | ||||
1508 | { return m_vec->last (); } | ||||
1509 | |||||
1510 | bool space (int nelems) const | ||||
1511 | { return m_vec ? m_vec->space (nelems) : nelems == 0; } | ||||
1512 | |||||
1513 | bool iterate (unsigned ix, T *p) const; | ||||
1514 | bool iterate (unsigned ix, T **p) const; | ||||
1515 | vec copy (ALONE_CXX_MEM_STAT_INFO) const; | ||||
1516 | bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1517 | bool reserve_exact (unsigned CXX_MEM_STAT_INFO); | ||||
1518 | void splice (const vec &); | ||||
1519 | void safe_splice (const vec & CXX_MEM_STAT_INFO); | ||||
1520 | T *quick_push (const T &); | ||||
1521 | T *safe_push (const T &CXX_MEM_STAT_INFO); | ||||
1522 | T &pop (void); | ||||
1523 | void truncate (unsigned); | ||||