Bug Summary

File: build/gcc/analyzer/region-model.cc
Warning: line 295, column 23
Use of memory after it is freed

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name region-model.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . 
-I analyzer -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-zdRMAs.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc
1/* Classes for modeling the state of memory.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#define INCLUDE_MEMORY
23#include "system.h"
24#include "coretypes.h"
25#include "make-unique.h"
26#include "tree.h"
27#include "function.h"
28#include "basic-block.h"
29#include "gimple.h"
30#include "gimple-iterator.h"
31#include "diagnostic-core.h"
32#include "graphviz.h"
33#include "options.h"
34#include "cgraph.h"
35#include "tree-dfa.h"
36#include "stringpool.h"
37#include "convert.h"
38#include "target.h"
39#include "fold-const.h"
40#include "tree-pretty-print.h"
41#include "diagnostic-color.h"
42#include "diagnostic-metadata.h"
43#include "bitmap.h"
44#include "selftest.h"
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
51#include "analyzer/supergraph.h"
52#include "sbitmap.h"
53#include "analyzer/call-string.h"
54#include "analyzer/program-point.h"
55#include "analyzer/store.h"
56#include "analyzer/region-model.h"
57#include "analyzer/constraint-manager.h"
58#include "diagnostic-event-id.h"
59#include "analyzer/sm.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "analyzer/pending-diagnostic.h"
63#include "analyzer/region-model-reachability.h"
64#include "analyzer/analyzer-selftests.h"
65#include "analyzer/program-state.h"
66#include "analyzer/call-summary.h"
67#include "stor-layout.h"
68#include "attribs.h"
69#include "tree-object-size.h"
70#include "gimple-ssa.h"
71#include "tree-phinodes.h"
72#include "tree-ssa-operands.h"
73#include "ssa-iterators.h"
74#include "calls.h"
75#include "is-a.h"
76#include "gcc-rich-location.h"
77#include "analyzer/checker-event.h"
78#include "analyzer/checker-path.h"
79#include "analyzer/feasible-graph.h"
80
81#if ENABLE_ANALYZER1
82
83namespace ana {
84
85/* Dump T to PP in language-independent form, for debugging/logging/dumping
86 purposes. */
87
88void
89dump_tree (pretty_printer *pp, tree t)
90{
91 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
92}
93
94/* Dump T to PP in language-independent form in quotes, for
95 debugging/logging/dumping purposes. */
96
97void
98dump_quoted_tree (pretty_printer *pp, tree t)
99{
100 pp_begin_quote (pp, pp_show_color (pp)(pp)->show_color);
101 dump_tree (pp, t);
102 pp_end_quote (pp, pp_show_color (pp)(pp)->show_color);
103}
104
105/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
106 calls within other pp_printf calls.
107
108 default_tree_printer handles 'T' and some other codes by calling
109 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
110 dump_generic_node calls pp_printf in various places, leading to
111 garbled output.
112
113 Ideally pp_printf could be made to be reentrant, but in the meantime
114 this function provides a workaround. */
115
116void
117print_quoted_type (pretty_printer *pp, tree t)
118{
119 pp_begin_quote (pp, pp_show_color (pp)(pp)->show_color);
120 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
121 pp_end_quote (pp, pp_show_color (pp)(pp)->show_color);
122}
123
124/* class region_to_value_map. */
125
126/* Assignment operator for region_to_value_map. */
127
128region_to_value_map &
129region_to_value_map::operator= (const region_to_value_map &other)
130{
131 m_hash_map.empty ();
132 for (auto iter : other.m_hash_map)
133 {
134 const region *reg = iter.first;
135 const svalue *sval = iter.second;
136 m_hash_map.put (reg, sval);
137 }
138 return *this;
139}
140
141/* Equality operator for region_to_value_map. */
142
143bool
144region_to_value_map::operator== (const region_to_value_map &other) const
145{
146 if (m_hash_map.elements () != other.m_hash_map.elements ())
147 return false;
148
149 for (auto iter : *this)
150 {
151 const region *reg = iter.first;
152 const svalue *sval = iter.second;
153 const svalue * const *other_slot = other.get (reg);
154 if (other_slot == NULLnullptr)
155 return false;
156 if (sval != *other_slot)
157 return false;
158 }
159
160 return true;
161}
162
163/* Dump this object to PP. */
164
165void
166region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
167 bool multiline) const
168{
169 auto_vec<const region *> regs;
170 for (iterator iter = begin (); iter != end (); ++iter)
171 regs.safe_push ((*iter).first);
172 regs.qsort (region::cmp_ptr_ptr)qsort (region::cmp_ptr_ptr);
173 if (multiline)
174 pp_newline (pp);
175 else
176 pp_string (pp, " {");
177 unsigned i;
178 const region *reg;
179 FOR_EACH_VEC_ELT (regs, i, reg)for (i = 0; (regs).iterate ((i), &(reg)); ++(i))
180 {
181 if (multiline)
182 pp_string (pp, " ");
183 else if (i > 0)
184 pp_string (pp, ", ");
185 reg->dump_to_pp (pp, simple);
186 pp_string (pp, ": ");
187 const svalue *sval = *get (reg);
188 sval->dump_to_pp (pp, true);
189 if (multiline)
190 pp_newline (pp);
191 }
192 if (!multiline)
193 pp_string (pp, "}");
194}
195
196/* Dump this object to stderr. */
197
198DEBUG_FUNCTION__attribute__ ((__used__)) void
199region_to_value_map::dump (bool simple) const
200{
201 pretty_printer pp;
202 pp_format_decoder (&pp)(&pp)->format_decoder = default_tree_printer;
203 pp_show_color (&pp)(&pp)->show_color = pp_show_color (global_dc->printer)(global_dc->printer)->show_color;
204 pp.buffer->stream = stderrstderr;
205 dump_to_pp (&pp, simple, true);
206 pp_newline (&pp);
207 pp_flush (&pp);
208}
209
210
211/* Attempt to merge THIS with OTHER, writing the result
212 to OUT.
213
214 For now, write (region, value) mappings that are in common between THIS
215 and OTHER to OUT, effectively taking the intersection.
216
217 Reject merger of different values. */
218
219bool
220region_to_value_map::can_merge_with_p (const region_to_value_map &other,
221 region_to_value_map *out) const
222{
223 for (auto iter : *this)
224 {
225 const region *iter_reg = iter.first;
226 const svalue *iter_sval = iter.second;
227 const svalue * const * other_slot = other.get (iter_reg);
228 if (other_slot)
229 {
230 if (iter_sval == *other_slot)
231 out->put (iter_reg, iter_sval);
232 else
233 return false;
234 }
235 }
236 return true;
237}
238
239/* Purge any state involving SVAL. */
240
241void
242region_to_value_map::purge_state_involving (const svalue *sval)
243{
244 auto_vec<const region *> to_purge;
245 for (auto iter : *this)
246 {
247 const region *iter_reg = iter.first;
248 const svalue *iter_sval = iter.second;
249 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
250 to_purge.safe_push (iter_reg);
251 }
252 for (auto iter : to_purge)
253 m_hash_map.remove (iter);
254}
255
256/* class region_model. */
257
258/* Ctor for region_model: construct an "empty" model. */
259
260region_model::region_model (region_model_manager *mgr)
261: m_mgr (mgr), m_store (), m_current_frame (NULLnullptr),
262 m_dynamic_extents ()
263{
264 m_constraints = new constraint_manager (mgr);
265}
266
267/* region_model's copy ctor. */
268
269region_model::region_model (const region_model &other)
270: m_mgr (other.m_mgr), m_store (other.m_store),
271 m_constraints (new constraint_manager (*other.m_constraints)),
272 m_current_frame (other.m_current_frame),
273 m_dynamic_extents (other.m_dynamic_extents)
274{
275}
276
277/* region_model's dtor. */
278
279region_model::~region_model ()
280{
281 delete m_constraints;
282}
283
284/* region_model's assignment operator. */
285
286region_model &
1
Assuming other == *this
287region_model::operator= (const region_model &other)
288{
289 /* m_mgr is const. */
290 gcc_assert (m_mgr == other.m_mgr)((void)(!(m_mgr == other.m_mgr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 290, __FUNCTION__), 0 : 0))
;
2
'?' condition is false
291
292 m_store = other.m_store;
293
294 delete m_constraints;
3
Memory is released
295 m_constraints = new constraint_manager (*other.m_constraints);
4
Use of memory after it is freed
296
297 m_current_frame = other.m_current_frame;
298
299 m_dynamic_extents = other.m_dynamic_extents;
300
301 return *this;
302}
303
304/* Equality operator for region_model.
305
306 Amongst other things this directly compares the stores and the constraint
307 managers, so for this to be meaningful both this and OTHER should
308 have been canonicalized. */
309
310bool
311region_model::operator== (const region_model &other) const
312{
313 /* We can only compare instances that use the same manager. */
314 gcc_assert (m_mgr == other.m_mgr)((void)(!(m_mgr == other.m_mgr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 314, __FUNCTION__), 0 : 0))
;
315
316 if (m_store != other.m_store)
317 return false;
318
319 if (*m_constraints != *other.m_constraints)
320 return false;
321
322 if (m_current_frame != other.m_current_frame)
323 return false;
324
325 if (m_dynamic_extents != other.m_dynamic_extents)
326 return false;
327
328 gcc_checking_assert (hash () == other.hash ())((void)(!(hash () == other.hash ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 328, __FUNCTION__), 0 : 0))
;
329
330 return true;
331}
332
333/* Generate a hash value for this region_model. */
334
335hashval_t
336region_model::hash () const
337{
338 hashval_t result = m_store.hash ();
339 result ^= m_constraints->hash ();
340 return result;
341}
342
343/* Dump a representation of this model to PP, showing the
344 stack, the store, and any constraints.
345 Use SIMPLE to control how svalues and regions are printed. */
346
347void
348region_model::dump_to_pp (pretty_printer *pp, bool simple,
349 bool multiline) const
350{
351 /* Dump stack. */
352 pp_printf (pp, "stack depth: %i", get_stack_depth ());
353 if (multiline)
354 pp_newline (pp);
355 else
356 pp_string (pp, " {");
357 for (const frame_region *iter_frame = m_current_frame; iter_frame;
358 iter_frame = iter_frame->get_calling_frame ())
359 {
360 if (multiline)
361 pp_string (pp, " ");
362 else if (iter_frame != m_current_frame)
363 pp_string (pp, ", ");
364 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
365 iter_frame->dump_to_pp (pp, simple);
366 if (multiline)
367 pp_newline (pp);
368 }
369 if (!multiline)
370 pp_string (pp, "}");
371
372 /* Dump store. */
373 if (!multiline)
374 pp_string (pp, ", {");
375 m_store.dump_to_pp (pp, simple, multiline,
376 m_mgr->get_store_manager ());
377 if (!multiline)
378 pp_string (pp, "}");
379
380 /* Dump constraints. */
381 pp_string (pp, "constraint_manager:");
382 if (multiline)
383 pp_newline (pp);
384 else
385 pp_string (pp, " {");
386 m_constraints->dump_to_pp (pp, multiline);
387 if (!multiline)
388 pp_string (pp, "}");
389
390 /* Dump sizes of dynamic regions, if any are known. */
391 if (!m_dynamic_extents.is_empty ())
392 {
393 pp_string (pp, "dynamic_extents:");
394 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
395 }
396}
397
398/* Dump a representation of this model to FILE. */
399
400void
401region_model::dump (FILE *fp, bool simple, bool multiline) const
402{
403 pretty_printer pp;
404 pp_format_decoder (&pp)(&pp)->format_decoder = default_tree_printer;
405 pp_show_color (&pp)(&pp)->show_color = pp_show_color (global_dc->printer)(global_dc->printer)->show_color;
406 pp.buffer->stream = fp;
407 dump_to_pp (&pp, simple, multiline);
408 pp_newline (&pp);
409 pp_flush (&pp);
410}
411
412/* Dump a multiline representation of this model to stderr. */
413
414DEBUG_FUNCTION__attribute__ ((__used__)) void
415region_model::dump (bool simple) const
416{
417 dump (stderrstderr, simple, true);
418}
419
420/* Dump a multiline representation of this model to stderr. */
421
422DEBUG_FUNCTION__attribute__ ((__used__)) void
423region_model::debug () const
424{
425 dump (true);
426}
427
428/* Assert that this object is valid. */
429
430void
431region_model::validate () const
432{
433 m_store.validate ();
434}
435
436/* Canonicalize the store and constraints, to maximize the chance of
437 equality between region_model instances. */
438
439void
440region_model::canonicalize ()
441{
442 m_store.canonicalize (m_mgr->get_store_manager ());
443 m_constraints->canonicalize ();
444}
445
446/* Return true if this region_model is in canonical form. */
447
448bool
449region_model::canonicalized_p () const
450{
451 region_model copy (*this);
452 copy.canonicalize ();
453 return *this == copy;
454}
455
456/* See the comment for store::loop_replay_fixup. */
457
458void
459region_model::loop_replay_fixup (const region_model *dst_state)
460{
461 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
462}
463
464/* A subclass of pending_diagnostic for complaining about uses of
465 poisoned values. */
466
467class poisoned_value_diagnostic
468: public pending_diagnostic_subclass<poisoned_value_diagnostic>
469{
470public:
471 poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
472 const region *src_region,
473 tree check_expr)
474 : m_expr (expr), m_pkind (pkind),
475 m_src_region (src_region),
476 m_check_expr (check_expr)
477 {}
478
479 const char *get_kind () const final override { return "poisoned_value_diagnostic"; }
480
481 bool use_of_uninit_p () const final override
482 {
483 return m_pkind == POISON_KIND_UNINIT;
484 }
485
486 bool operator== (const poisoned_value_diagnostic &other) const
487 {
488 return (m_expr == other.m_expr
489 && m_pkind == other.m_pkind
490 && m_src_region == other.m_src_region);
491 }
492
493 int get_controlling_option () const final override
494 {
495 switch (m_pkind)
496 {
497 default:
498 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 498, __FUNCTION__))
;
499 case POISON_KIND_UNINIT:
500 return OPT_Wanalyzer_use_of_uninitialized_value;
501 case POISON_KIND_FREED:
502 return OPT_Wanalyzer_use_after_free;
503 case POISON_KIND_POPPED_STACK:
504 return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
505 }
506 }
507
508 bool terminate_path_p () const final override { return true; }
509
510 bool emit (rich_location *rich_loc) final override
511 {
512 switch (m_pkind)
513 {
514 default:
515 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 515, __FUNCTION__))
;
516 case POISON_KIND_UNINIT:
517 {
518 diagnostic_metadata m;
519 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
520 return warning_meta (rich_loc, m, get_controlling_option (),
521 "use of uninitialized value %qE",
522 m_expr);
523 }
524 break;
525 case POISON_KIND_FREED:
526 {
527 diagnostic_metadata m;
528 m.add_cwe (416); /* "CWE-416: Use After Free". */
529 return warning_meta (rich_loc, m, get_controlling_option (),
530 "use after %<free%> of %qE",
531 m_expr);
532 }
533 break;
534 case POISON_KIND_POPPED_STACK:
535 {
536 /* TODO: which CWE? */
537 return warning_at
538 (rich_loc, get_controlling_option (),
539 "dereferencing pointer %qE to within stale stack frame",
540 m_expr);
541 }
542 break;
543 }
544 }
545
546 label_text describe_final_event (const evdesc::final_event &ev) final override
547 {
548 switch (m_pkind)
549 {
550 default:
551 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 551, __FUNCTION__))
;
552 case POISON_KIND_UNINIT:
553 return ev.formatted_print ("use of uninitialized value %qE here",
554 m_expr);
555 case POISON_KIND_FREED:
556 return ev.formatted_print ("use after %<free%> of %qE here",
557 m_expr);
558 case POISON_KIND_POPPED_STACK:
559 return ev.formatted_print
560 ("dereferencing pointer %qE to within stale stack frame",
561 m_expr);
562 }
563 }
564
565 void mark_interesting_stuff (interesting_t *interest) final override
566 {
567 if (m_src_region)
568 interest->add_region_creation (m_src_region);
569 }
570
571 /* Attempt to suppress false positives.
572 Reject paths where the value of the underlying region isn't poisoned.
573 This can happen due to state merging when exploring the exploded graph,
574 where the more precise analysis during feasibility analysis finds that
575 the region is in fact valid.
576 To do this we need to get the value from the fgraph. Unfortunately
577 we can't simply query the state of m_src_region (from the enode),
578 since it might be a different region in the fnode state (e.g. with
579 heap-allocated regions, the numbering could be different).
580 Hence we access m_check_expr, if available. */
581
582 bool check_valid_fpath_p (const feasible_node &fnode,
583 const gimple *emission_stmt)
584 const final override
585 {
586 if (!m_check_expr)
587 return true;
588
589 /* We've reached the enode, but not necessarily the right function_point.
590 Try to get the state at the correct stmt. */
591 region_model emission_model (fnode.get_model ().get_manager());
592 if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
593 /* Couldn't get state; accept this diagnostic. */
594 return true;
595
596 const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULLnullptr);
597 /* Check to see if the expr is also poisoned in FNODE (and in the
598 same way). */
599 const poisoned_svalue * fspval = fsval->dyn_cast_poisoned_svalue ();
600 if (!fspval)
601 return false;
602 if (fspval->get_poison_kind () != m_pkind)
603 return false;
604 return true;
605 }
606
607private:
608 tree m_expr;
609 enum poison_kind m_pkind;
610 const region *m_src_region;
611 tree m_check_expr;
612};
613
614/* A subclass of pending_diagnostic for complaining about shifts
615 by negative counts. */
616
617class shift_count_negative_diagnostic
618: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
619{
620public:
621 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
622 : m_assign (assign), m_count_cst (count_cst)
623 {}
624
625 const char *get_kind () const final override
626 {
627 return "shift_count_negative_diagnostic";
628 }
629
630 bool operator== (const shift_count_negative_diagnostic &other) const
631 {
632 return (m_assign == other.m_assign
633 && same_tree_p (m_count_cst, other.m_count_cst));
634 }
635
636 int get_controlling_option () const final override
637 {
638 return OPT_Wanalyzer_shift_count_negative;
639 }
640
641 bool emit (rich_location *rich_loc) final override
642 {
643 return warning_at (rich_loc, get_controlling_option (),
644 "shift by negative count (%qE)", m_count_cst);
645 }
646
647 label_text describe_final_event (const evdesc::final_event &ev) final override
648 {
649 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
650 }
651
652private:
653 const gassign *m_assign;
654 tree m_count_cst;
655};
656
657/* A subclass of pending_diagnostic for complaining about shifts
658 by counts >= the width of the operand type. */
659
660class shift_count_overflow_diagnostic
661: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
662{
663public:
664 shift_count_overflow_diagnostic (const gassign *assign,
665 int operand_precision,
666 tree count_cst)
667 : m_assign (assign), m_operand_precision (operand_precision),
668 m_count_cst (count_cst)
669 {}
670
671 const char *get_kind () const final override
672 {
673 return "shift_count_overflow_diagnostic";
674 }
675
676 bool operator== (const shift_count_overflow_diagnostic &other) const
677 {
678 return (m_assign == other.m_assign
679 && m_operand_precision == other.m_operand_precision
680 && same_tree_p (m_count_cst, other.m_count_cst));
681 }
682
683 int get_controlling_option () const final override
684 {
685 return OPT_Wanalyzer_shift_count_overflow;
686 }
687
688 bool emit (rich_location *rich_loc) final override
689 {
690 return warning_at (rich_loc, get_controlling_option (),
691 "shift by count (%qE) >= precision of type (%qi)",
692 m_count_cst, m_operand_precision);
693 }
694
695 label_text describe_final_event (const evdesc::final_event &ev) final override
696 {
697 return ev.formatted_print ("shift by count %qE here", m_count_cst);
698 }
699
700private:
701 const gassign *m_assign;
702 int m_operand_precision;
703 tree m_count_cst;
704};
705
706/* If ASSIGN is a stmt that can be modelled via
707 set_value (lhs_reg, SVALUE, CTXT)
708 for some SVALUE, get the SVALUE.
709 Otherwise return NULL. */
710
711const svalue *
712region_model::get_gassign_result (const gassign *assign,
713 region_model_context *ctxt)
714{
715 tree lhs = gimple_assign_lhs (assign);
716 tree rhs1 = gimple_assign_rhs1 (assign);
717 enum tree_code op = gimple_assign_rhs_code (assign);
718 switch (op)
719 {
720 default:
721 return NULLnullptr;
722
723 case POINTER_PLUS_EXPR:
724 {
725 /* e.g. "_1 = a_10(D) + 12;" */
726 tree ptr = rhs1;
727 tree offset = gimple_assign_rhs2 (assign);
728
729 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
730 const svalue *offset_sval = get_rvalue (offset, ctxt);
731 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
732 is an integer of type sizetype". */
733 offset_sval = m_mgr->get_or_create_cast (size_type_nodeglobal_trees[TI_SIZE_TYPE], offset_sval);
734
735 const svalue *sval_binop
736 = m_mgr->get_or_create_binop (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 736, __FUNCTION__))->typed.type)
, op,
737 ptr_sval, offset_sval);
738 return sval_binop;
739 }
740 break;
741
742 case POINTER_DIFF_EXPR:
743 {
744 /* e.g. "_1 = p_2(D) - q_3(D);". */
745 tree rhs2 = gimple_assign_rhs2 (assign);
746 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
747 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
748
749 // TODO: perhaps fold to zero if they're known to be equal?
750
751 const svalue *sval_binop
752 = m_mgr->get_or_create_binop (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 752, __FUNCTION__))->typed.type)
, op,
753 rhs1_sval, rhs2_sval);
754 return sval_binop;
755 }
756 break;
757
758 /* Assignments of the form
759 set_value (lvalue (LHS), rvalue (EXPR))
760 for various EXPR.
761 We already have the lvalue for the LHS above, as "lhs_reg". */
762 case ADDR_EXPR: /* LHS = &RHS; */
763 case BIT_FIELD_REF:
764 case COMPONENT_REF: /* LHS = op0.op1; */
765 case MEM_REF:
766 case REAL_CST:
767 case COMPLEX_CST:
768 case VECTOR_CST:
769 case INTEGER_CST:
770 case ARRAY_REF:
771 case SSA_NAME: /* LHS = VAR; */
772 case VAR_DECL: /* LHS = VAR; */
773 case PARM_DECL:/* LHS = VAR; */
774 case REALPART_EXPR:
775 case IMAGPART_EXPR:
776 return get_rvalue (rhs1, ctxt);
777
778 case ABS_EXPR:
779 case ABSU_EXPR:
780 case CONJ_EXPR:
781 case BIT_NOT_EXPR:
782 case FIX_TRUNC_EXPR:
783 case FLOAT_EXPR:
784 case NEGATE_EXPR:
785 case NOP_EXPR:
786 case VIEW_CONVERT_EXPR:
787 {
788 /* Unary ops. */
789 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
790 const svalue *sval_unaryop
791 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 791, __FUNCTION__))->typed.type)
, op, rhs_sval);
792 return sval_unaryop;
793 }
794
795 case EQ_EXPR:
796 case GE_EXPR:
797 case LE_EXPR:
798 case NE_EXPR:
799 case GT_EXPR:
800 case LT_EXPR:
801 case UNORDERED_EXPR:
802 case ORDERED_EXPR:
803 {
804 tree rhs2 = gimple_assign_rhs2 (assign);
805
806 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
807 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
808
809 if (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 809, __FUNCTION__))->typed.type)
== boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE])
810 {
811 /* Consider constraints between svalues. */
812 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
813 if (t.is_known ())
814 return m_mgr->get_or_create_constant_svalue
815 (t.is_true () ? boolean_true_nodeglobal_trees[TI_BOOLEAN_TRUE] : boolean_false_nodeglobal_trees[TI_BOOLEAN_FALSE]);
816 }
817
818 /* Otherwise, generate a symbolic binary op. */
819 const svalue *sval_binop
820 = m_mgr->get_or_create_binop (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 820, __FUNCTION__))->typed.type)
, op,
821 rhs1_sval, rhs2_sval);
822 return sval_binop;
823 }
824 break;
825
826 case PLUS_EXPR:
827 case MINUS_EXPR:
828 case MULT_EXPR:
829 case MULT_HIGHPART_EXPR:
830 case TRUNC_DIV_EXPR:
831 case CEIL_DIV_EXPR:
832 case FLOOR_DIV_EXPR:
833 case ROUND_DIV_EXPR:
834 case TRUNC_MOD_EXPR:
835 case CEIL_MOD_EXPR:
836 case FLOOR_MOD_EXPR:
837 case ROUND_MOD_EXPR:
838 case RDIV_EXPR:
839 case EXACT_DIV_EXPR:
840 case LSHIFT_EXPR:
841 case RSHIFT_EXPR:
842 case LROTATE_EXPR:
843 case RROTATE_EXPR:
844 case BIT_IOR_EXPR:
845 case BIT_XOR_EXPR:
846 case BIT_AND_EXPR:
847 case MIN_EXPR:
848 case MAX_EXPR:
849 case COMPLEX_EXPR:
850 {
851 /* Binary ops. */
852 tree rhs2 = gimple_assign_rhs2 (assign);
853
854 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
855 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
856
857 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
858 {
859 /* "INT34-C. Do not shift an expression by a negative number of bits
860 or by greater than or equal to the number of bits that exist in
861 the operand." */
862 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
863 if (TREE_CODE (rhs2_cst)((enum tree_code) (rhs2_cst)->base.code) == INTEGER_CST)
864 {
865 if (tree_int_cst_sgn (rhs2_cst) < 0)
866 ctxt->warn
867 (make_unique<shift_count_negative_diagnostic>
868 (assign, rhs2_cst));
869 else if (compare_tree_int (rhs2_cst,
870 TYPE_PRECISION (TREE_TYPE (rhs1))((tree_class_check ((((contains_struct_check ((rhs1), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 870, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 870, __FUNCTION__))->type_common.precision)
)
871 >= 0)
872 ctxt->warn
873 (make_unique<shift_count_overflow_diagnostic>
874 (assign,
875 int (TYPE_PRECISION (TREE_TYPE (rhs1))((tree_class_check ((((contains_struct_check ((rhs1), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 875, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 875, __FUNCTION__))->type_common.precision)
),
876 rhs2_cst));
877 }
878 }
879
880 const svalue *sval_binop
881 = m_mgr->get_or_create_binop (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 881, __FUNCTION__))->typed.type)
, op,
882 rhs1_sval, rhs2_sval);
883 return sval_binop;
884 }
885
886 /* Vector expressions. In theory we could implement these elementwise,
887 but for now, simply return unknown values. */
888 case VEC_DUPLICATE_EXPR:
889 case VEC_SERIES_EXPR:
890 case VEC_COND_EXPR:
891 case VEC_PERM_EXPR:
892 case VEC_WIDEN_MULT_HI_EXPR:
893 case VEC_WIDEN_MULT_LO_EXPR:
894 case VEC_WIDEN_MULT_EVEN_EXPR:
895 case VEC_WIDEN_MULT_ODD_EXPR:
896 case VEC_UNPACK_HI_EXPR:
897 case VEC_UNPACK_LO_EXPR:
898 case VEC_UNPACK_FLOAT_HI_EXPR:
899 case VEC_UNPACK_FLOAT_LO_EXPR:
900 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
901 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
902 case VEC_PACK_TRUNC_EXPR:
903 case VEC_PACK_SAT_EXPR:
904 case VEC_PACK_FIX_TRUNC_EXPR:
905 case VEC_PACK_FLOAT_EXPR:
906 case VEC_WIDEN_LSHIFT_HI_EXPR:
907 case VEC_WIDEN_LSHIFT_LO_EXPR:
908 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 908, __FUNCTION__))->typed.type)
);
909 }
910}
911
912/* Workaround for discarding certain false positives from
913 -Wanalyzer-use-of-uninitialized-value
914 of the form:
915 ((A OR-IF B) OR-IF C)
916 and:
917 ((A AND-IF B) AND-IF C)
918 where evaluating B is redundant, but could involve simple accesses of
919 uninitialized locals.
920
921 When optimization is turned on the FE can immediately fold compound
922 conditionals. Specifically, c_parser_condition parses this condition:
923 ((A OR-IF B) OR-IF C)
924 and calls c_fully_fold on the condition.
925 Within c_fully_fold, fold_truth_andor is called, which bails when
926 optimization is off, but if any optimization is turned on can convert the
927 ((A OR-IF B) OR-IF C)
928 into:
929 ((A OR B) OR_IF C)
930 for sufficiently simple B
931 i.e. the inner OR-IF becomes an OR.
932 At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
933 giving this for the inner condition:
934 tmp = A | B;
935 if (tmp)
936 thus effectively synthesizing a redundant access of B when optimization
937 is turned on, when compared to:
938 if (A) goto L1; else goto L4;
939 L1: if (B) goto L2; else goto L4;
940 L2: if (C) goto L3; else goto L4;
941 for the unoptimized case.
942
943 Return true if CTXT appears to be handling such a short-circuitable stmt,
944 such as the def-stmt for B for the:
945 tmp = A | B;
946 case above, for the case where A is true and thus B would have been
947 short-circuited without optimization, using MODEL for the value of A. */
948
949static bool
950within_short_circuited_stmt_p (const region_model *model,
951 const gassign *assign_stmt)
952{
953 /* We must have an assignment to a temporary of _Bool type. */
954 tree lhs = gimple_assign_lhs (assign_stmt);
955 if (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 955, __FUNCTION__))->typed.type)
!= boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE])
956 return false;
957 if (TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) != SSA_NAME)
958 return false;
959 if (SSA_NAME_VAR (lhs)((tree_check ((lhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 959, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((lhs)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (lhs)->ssa_name.var
)
!= NULL_TREE(tree) nullptr)
960 return false;
961
962 /* The temporary bool must be used exactly once: as the second arg of
963 a BIT_IOR_EXPR or BIT_AND_EXPR. */
964 use_operand_p use_op;
965 gimple *use_stmt;
966 if (!single_imm_use (lhs, &use_op, &use_stmt))
967 return false;
968 const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
969 if (!use_assign)
970 return false;
971 enum tree_code op = gimple_assign_rhs_code (use_assign);
972 if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
973 return false;
974 if (!(gimple_assign_rhs1 (use_assign) != lhs
975 && gimple_assign_rhs2 (use_assign) == lhs))
976 return false;
977
978 /* The first arg of the bitwise stmt must have a known value in MODEL
979 that implies that the value of the second arg doesn't matter, i.e.
980 1 for bitwise or, 0 for bitwise and. */
981 tree other_arg = gimple_assign_rhs1 (use_assign);
982 /* Use a NULL ctxt here to avoid generating warnings. */
983 const svalue *other_arg_sval = model->get_rvalue (other_arg, NULLnullptr);
984 tree other_arg_cst = other_arg_sval->maybe_get_constant ();
985 if (!other_arg_cst)
986 return false;
987 switch (op)
988 {
989 default:
990 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 990, __FUNCTION__))
;
991 case BIT_IOR_EXPR:
992 if (zerop (other_arg_cst))
993 return false;
994 break;
995 case BIT_AND_EXPR:
996 if (!zerop (other_arg_cst))
997 return false;
998 break;
999 }
1000
1001 /* All tests passed. We appear to be in a stmt that generates a boolean
1002 temporary with a value that won't matter. */
1003 return true;
1004}
1005
1006/* Workaround for discarding certain false positives from
1007 -Wanalyzer-use-of-uninitialized-value
1008 seen with -ftrivial-auto-var-init=.
1009
1010 -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
1011
1012 If the address of the var is taken, gimplification will give us
1013 something like:
1014
1015 _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
1016 len = _1;
1017
1018 The result of DEFERRED_INIT will be an uninit value; we don't
1019 want to emit a false positive for "len = _1;"
1020
1021 Return true if ASSIGN_STMT is such a stmt. */
1022
1023static bool
1024due_to_ifn_deferred_init_p (const gassign *assign_stmt)
1025
1026{
1027 /* We must have an assignment to a decl from an SSA name that's the
1028 result of a IFN_DEFERRED_INIT call. */
1029 if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
1030 return false;
1031 tree lhs = gimple_assign_lhs (assign_stmt);
1032 if (TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) != VAR_DECL)
1033 return false;
1034 tree rhs = gimple_assign_rhs1 (assign_stmt);
1035 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) != SSA_NAME)
1036 return false;
1037 const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs)(tree_check ((rhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1037, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1038 const gcall *call = dyn_cast <const gcall *> (def_stmt);
1039 if (!call)
1040 return false;
1041 if (gimple_call_internal_p (call)
1042 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1043 return true;
1044 return false;
1045}
1046
1047/* Check for SVAL being poisoned, adding a warning to CTXT.
1048 Return SVAL, or, if a warning is added, another value, to avoid
1049 repeatedly complaining about the same poisoned value in followup code.
1050 SRC_REGION is a hint about where SVAL came from, and can be NULL. */
1051
1052const svalue *
1053region_model::check_for_poison (const svalue *sval,
1054 tree expr,
1055 const region *src_region,
1056 region_model_context *ctxt) const
1057{
1058 if (!ctxt)
1059 return sval;
1060
1061 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
1062 {
1063 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1064
1065 /* Ignore uninitialized uses of empty types; there's nothing
1066 to initialize. */
1067 if (pkind == POISON_KIND_UNINIT
1068 && sval->get_type ()
1069 && is_empty_type (sval->get_type ()))
1070 return sval;
1071
1072 if (pkind == POISON_KIND_UNINIT)
1073 if (const gimple *curr_stmt = ctxt->get_stmt ())
1074 if (const gassign *assign_stmt
1075 = dyn_cast <const gassign *> (curr_stmt))
1076 {
1077 /* Special case to avoid certain false positives. */
1078 if (within_short_circuited_stmt_p (this, assign_stmt))
1079 return sval;
1080
1081 /* Special case to avoid false positive on
1082 -ftrivial-auto-var-init=. */
1083 if (due_to_ifn_deferred_init_p (assign_stmt))
1084 return sval;
1085 }
1086
1087 /* If we have an SSA name for a temporary, we don't want to print
1088 '<unknown>'.
1089 Poisoned values are shared by type, and so we can't reconstruct
1090 the tree other than via the def stmts, using
1091 fixup_tree_for_diagnostic. */
1092 tree diag_arg = fixup_tree_for_diagnostic (expr);
1093 if (src_region == NULLnullptr && pkind == POISON_KIND_UNINIT)
1094 src_region = get_region_for_poisoned_expr (expr);
1095
1096 /* Can we reliably get the poisoned value from "expr"?
1097 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
1098 Unfortunately, we might not have a reliable value for EXPR.
1099 Hence we only query its value now, and only use it if we get the
1100 poisoned value back again. */
1101 tree check_expr = expr;
1102 const svalue *foo_sval = get_rvalue (expr, NULLnullptr);
1103 if (foo_sval == sval)
1104 check_expr = expr;
1105 else
1106 check_expr = NULLnullptr;
1107 if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
1108 pkind,
1109 src_region,
1110 check_expr)))
1111 {
1112 /* We only want to report use of a poisoned value at the first
1113 place it gets used; return an unknown value to avoid generating
1114 a chain of followup warnings. */
1115 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
1116 }
1117
1118 return sval;
1119 }
1120
1121 return sval;
1122}
1123
1124/* Attempt to get a region for describing EXPR, the source of region of
1125 a poisoned_svalue for use in a poisoned_value_diagnostic.
1126 Return NULL if there is no good region to use. */
1127
1128const region *
1129region_model::get_region_for_poisoned_expr (tree expr) const
1130{
1131 if (TREE_CODE (expr)((enum tree_code) (expr)->base.code) == SSA_NAME)
1132 {
1133 tree decl = SSA_NAME_VAR (expr)((tree_check ((expr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1133, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((expr)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (expr)->ssa_name
.var)
;
1134 if (decl && DECL_P (decl)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code
) (decl)->base.code))] == tcc_declaration)
)
1135 expr = decl;
1136 else
1137 return NULLnullptr;
1138 }
1139 return get_lvalue (expr, NULLnullptr);
1140}
1141
1142/* Update this model for the ASSIGN stmt, using CTXT to report any
1143 diagnostics. */
1144
1145void
1146region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
1147{
1148 tree lhs = gimple_assign_lhs (assign);
1149 tree rhs1 = gimple_assign_rhs1 (assign);
1150
1151 const region *lhs_reg = get_lvalue (lhs, ctxt);
1152
1153 /* Most assignments are handled by:
1154 set_value (lhs_reg, SVALUE, CTXT)
1155 for some SVALUE. */
1156 if (const svalue *sval = get_gassign_result (assign, ctxt))
1157 {
1158 tree expr = get_diagnostic_tree_for_gassign (assign);
1159 check_for_poison (sval, expr, NULLnullptr, ctxt);
1160 set_value (lhs_reg, sval, ctxt);
1161 return;
1162 }
1163
1164 enum tree_code op = gimple_assign_rhs_code (assign);
1165 switch (op)
1166 {
1167 default:
1168 {
1169 if (0)
1170 sorry_at (assign->location, "unhandled assignment op: %qs",
1171 get_tree_code_name (op));
1172 const svalue *unknown_sval
1173 = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1173, __FUNCTION__))->typed.type)
);
1174 set_value (lhs_reg, unknown_sval, ctxt);
1175 }
1176 break;
1177
1178 case CONSTRUCTOR:
1179 {
1180 if (TREE_CLOBBER_P (rhs1)(((enum tree_code) (rhs1)->base.code) == CONSTRUCTOR &&
((rhs1)->base.volatile_flag))
)
1181 {
1182 /* e.g. "x ={v} {CLOBBER};" */
1183 clobber_region (lhs_reg);
1184 }
1185 else
1186 {
1187 /* Any CONSTRUCTOR that survives to this point is either
1188 just a zero-init of everything, or a vector. */
1189 if (!CONSTRUCTOR_NO_CLEARING (rhs1)((tree_check ((rhs1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1189, __FUNCTION__, (CONSTRUCTOR)))->base.public_flag)
)
1190 zero_fill_region (lhs_reg);
1191 unsigned ix;
1192 tree index;
1193 tree val;
1194 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)for (ix = 0; (ix >= vec_safe_length (((tree_check ((rhs1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1194, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts)))
? false : (((void) (val = (*((tree_check ((rhs1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1194, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts))[
ix].value)), (index = (*((tree_check ((rhs1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1194, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts))[
ix].index), true); (ix)++)
1195 {
1196 gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)((void)(!(((enum tree_code) (((contains_struct_check ((rhs1),
(TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1196, __FUNCTION__))->typed.type))->base.code) == VECTOR_TYPE
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1196, __FUNCTION__), 0 : 0))
;
1197 if (!index)
1198 index = build_int_cst (integer_type_nodeinteger_types[itk_int], ix);
1199 gcc_assert (TREE_CODE (index) == INTEGER_CST)((void)(!(((enum tree_code) (index)->base.code) == INTEGER_CST
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1199, __FUNCTION__), 0 : 0))
;
1200 const svalue *index_sval
1201 = m_mgr->get_or_create_constant_svalue (index);
1202 gcc_assert (index_sval)((void)(!(index_sval) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1202, __FUNCTION__), 0 : 0))
;
1203 const region *sub_reg
1204 = m_mgr->get_element_region (lhs_reg,
1205 TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1205, __FUNCTION__))->typed.type)
,
1206 index_sval);
1207 const svalue *val_sval = get_rvalue (val, ctxt);
1208 set_value (sub_reg, val_sval, ctxt);
1209 }
1210 }
1211 }
1212 break;
1213
1214 case STRING_CST:
1215 {
1216 /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
1217 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1218 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
1219 ctxt ? ctxt->get_uncertainty () : NULLnullptr);
1220 }
1221 break;
1222 }
1223}
1224
1225/* Handle the pre-sm-state part of STMT, modifying this object in-place.
1226 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1227 side effects. */
1228
1229void
1230region_model::on_stmt_pre (const gimple *stmt,
1231 bool *out_unknown_side_effects,
1232 region_model_context *ctxt)
1233{
1234 switch (gimple_code (stmt))
1235 {
1236 default:
1237 /* No-op for now. */
1238 break;
1239
1240 case GIMPLE_ASSIGN:
1241 {
1242 const gassign *assign = as_a <const gassign *> (stmt);
1243 on_assignment (assign, ctxt);
1244 }
1245 break;
1246
1247 case GIMPLE_ASM:
1248 {
1249 const gasm *asm_stmt = as_a <const gasm *> (stmt);
1250 on_asm_stmt (asm_stmt, ctxt);
1251 }
1252 break;
1253
1254 case GIMPLE_CALL:
1255 {
1256 /* Track whether we have a gcall to a function that's not recognized by
1257 anything, for which we don't have a function body, or for which we
1258 don't know the fndecl. */
1259 const gcall *call = as_a <const gcall *> (stmt);
1260 *out_unknown_side_effects = on_call_pre (call, ctxt);
1261 }
1262 break;
1263
1264 case GIMPLE_RETURN:
1265 {
1266 const greturn *return_ = as_a <const greturn *> (stmt);
1267 on_return (return_, ctxt);
1268 }
1269 break;
1270 }
1271}
1272
1273/* Ensure that all arguments at the call described by CD are checked
1274 for poisoned values, by calling get_rvalue on each argument. */
1275
1276void
1277region_model::check_call_args (const call_details &cd) const
1278{
1279 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1280 cd.get_arg_svalue (arg_idx);
1281}
1282
1283/* Return true if CD is known to be a call to a function with
1284 __attribute__((const)). */
1285
1286static bool
1287const_fn_p (const call_details &cd)
1288{
1289 tree fndecl = cd.get_fndecl_for_call ();
1290 if (!fndecl)
1291 return false;
1292 gcc_assert (DECL_P (fndecl))((void)(!((tree_code_type_tmpl <0>::tree_code_type[(int
) (((enum tree_code) (fndecl)->base.code))] == tcc_declaration
)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1292, __FUNCTION__), 0 : 0))
;
1293 return TREE_READONLY (fndecl)((non_type_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1293, __FUNCTION__))->base.readonly_flag)
;
1294}
1295
1296/* If this CD is known to be a call to a function with
1297 __attribute__((const)), attempt to get a const_fn_result_svalue
1298 based on the arguments, or return NULL otherwise. */
1299
1300static const svalue *
1301maybe_get_const_fn_result (const call_details &cd)
1302{
1303 if (!const_fn_p (cd))
1304 return NULLnullptr;
1305
1306 unsigned num_args = cd.num_args ();
1307 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1308 /* Too many arguments. */
1309 return NULLnullptr;
1310
1311 auto_vec<const svalue *> inputs (num_args);
1312 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1313 {
1314 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1315 if (!arg_sval->can_have_associated_state_p ())
1316 return NULLnullptr;
1317 inputs.quick_push (arg_sval);
1318 }
1319
1320 region_model_manager *mgr = cd.get_manager ();
1321 const svalue *sval
1322 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1323 cd.get_fndecl_for_call (),
1324 inputs);
1325 return sval;
1326}
1327
1328/* Update this model for an outcome of a call that returns a specific
1329 integer constant.
1330 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1331 the state-merger code from merging success and failure outcomes. */
1332
1333void
1334region_model::update_for_int_cst_return (const call_details &cd,
1335 int retval,
1336 bool unmergeable)
1337{
1338 if (!cd.get_lhs_type ())
1339 return;
1340 if (TREE_CODE (cd.get_lhs_type ())((enum tree_code) (cd.get_lhs_type ())->base.code) != INTEGER_TYPE)
1341 return;
1342 const svalue *result
1343 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
1344 if (unmergeable)
1345 result = m_mgr->get_or_create_unmergeable (result);
1346 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1347}
1348
1349/* Update this model for an outcome of a call that returns zero.
1350 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1351 the state-merger code from merging success and failure outcomes. */
1352
1353void
1354region_model::update_for_zero_return (const call_details &cd,
1355 bool unmergeable)
1356{
1357 update_for_int_cst_return (cd, 0, unmergeable);
1358}
1359
1360/* Update this model for an outcome of a call that returns non-zero. */
1361
1362void
1363region_model::update_for_nonzero_return (const call_details &cd)
1364{
1365 if (!cd.get_lhs_type ())
1366 return;
1367 if (TREE_CODE (cd.get_lhs_type ())((enum tree_code) (cd.get_lhs_type ())->base.code) != INTEGER_TYPE)
1368 return;
1369 const svalue *zero
1370 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1371 const svalue *result
1372 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1373 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1374}
1375
1376/* Subroutine of region_model::maybe_get_copy_bounds.
1377 The Linux kernel commonly uses
1378 min_t([unsigned] long, VAR, sizeof(T));
1379 to set an upper bound on the size of a copy_to_user.
1380 Attempt to simplify such sizes by trying to get the upper bound as a
1381 constant.
1382 Return the simplified svalue if possible, or NULL otherwise. */
1383
1384static const svalue *
1385maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1386 region_model_manager *mgr)
1387{
1388 tree type = num_bytes_sval->get_type ();
1389 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
1390 num_bytes_sval = raw;
1391 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1392 if (binop_sval->get_op () == MIN_EXPR)
1393 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1394 {
1395 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1396 /* TODO: we might want to also capture the constraint
1397 when recording the diagnostic, or note that we're using
1398 the upper bound. */
1399 }
1400 return NULLnullptr;
1401}
1402
1403/* Attempt to get an upper bound for the size of a copy when simulating a
1404 copy function.
1405
1406 NUM_BYTES_SVAL is the symbolic value for the size of the copy.
1407 Use it if it's constant, otherwise try to simplify it. Failing
1408 that, use the size of SRC_REG if constant.
1409
1410 Return a symbolic value for an upper limit on the number of bytes
1411 copied, or NULL if no such value could be determined. */
1412
1413const svalue *
1414region_model::maybe_get_copy_bounds (const region *src_reg,
1415 const svalue *num_bytes_sval)
1416{
1417 if (num_bytes_sval->maybe_get_constant ())
1418 return num_bytes_sval;
1419
1420 if (const svalue *simplified
1421 = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
1422 num_bytes_sval = simplified;
1423
1424 if (num_bytes_sval->maybe_get_constant ())
1425 return num_bytes_sval;
1426
1427 /* For now, try just guessing the size as the capacity of the
1428 base region of the src.
1429 This is a hack; we might get too large a value. */
1430 const region *src_base_reg = src_reg->get_base_region ();
1431 num_bytes_sval = get_capacity (src_base_reg);
1432
1433 if (num_bytes_sval->maybe_get_constant ())
1434 return num_bytes_sval;
1435
1436 /* Non-constant: give up. */
1437 return NULLnullptr;
1438}
1439
1440/* Get any known_function for FNDECL for call CD.
1441
1442 The call must match all assumptions made by the known_function (such as
1443 e.g. "argument 1's type must be a pointer type").
1444
1445 Return NULL if no known_function is found, or it does not match the
1446 assumption(s). */
1447
1448const known_function *
1449region_model::get_known_function (tree fndecl, const call_details &cd) const
1450{
1451 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1452 return known_fn_mgr->get_match (fndecl, cd);
1453}
1454
1455/* Get any known_function for IFN, or NULL. */
1456
1457const known_function *
1458region_model::get_known_function (enum internal_fn ifn) const
1459{
1460 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1461 return known_fn_mgr->get_internal_fn (ifn);
1462}
1463
1464/* Update this model for the CALL stmt, using CTXT to report any
1465 diagnostics - the first half.
1466
1467 Updates to the region_model that should be made *before* sm-states
1468 are updated are done here; other updates to the region_model are done
1469 in region_model::on_call_post.
1470
1471 Return true if the function call has unknown side effects (it wasn't
1472 recognized and we don't have a body for it, or are unable to tell which
1473 fndecl it is). */
1474
1475bool
1476region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
1477{
1478 call_details cd (call, this, ctxt);
1479
1480 /* Special-case for IFN_DEFERRED_INIT.
1481 We want to report uninitialized variables with -fanalyzer (treating
1482 -ftrivial-auto-var-init= as purely a mitigation feature).
1483 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
1484 lhs of the call, so that it is still uninitialized from the point of
1485 view of the analyzer. */
1486 if (gimple_call_internal_p (call)
1487 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1488 return false; /* No side effects. */
1489
1490 /* Get svalues for all of the arguments at the callsite, to ensure that we
1491 complain about any uninitialized arguments. This might lead to
1492 duplicates if any of the handling below also looks up the svalues,
1493 but the deduplication code should deal with that. */
1494 if (ctxt)
1495 check_call_args (cd);
1496
1497 tree callee_fndecl = get_fndecl_for_call (call, ctxt);
1498
1499 /* Some of the cases below update the lhs of the call based on the
1500 return value, but not all. Provide a default value, which may
1501 get overwritten below. */
1502 if (tree lhs = gimple_call_lhs (call))
1503 {
1504 const region *lhs_region = get_lvalue (lhs, ctxt);
1505 const svalue *sval = maybe_get_const_fn_result (cd);
1506 if (!sval)
1507 {
1508 if (callee_fndecl
1509 && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)((contains_struct_check ((callee_fndecl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1509, __FUNCTION__))->decl_common.attributes)
))
1510 {
1511 const region *new_reg
1512 = get_or_create_region_for_heap_alloc (NULLnullptr, ctxt);
1513 mark_region_as_unknown (new_reg, NULLnullptr);
1514 sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
1515 }
1516 else
1517 /* For the common case of functions without __attribute__((const)),
1518 use a conjured value, and purge any prior state involving that
1519 value (in case this is in a loop). */
1520 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1520, __FUNCTION__))->typed.type)
, call,
1521 lhs_region,
1522 conjured_purge (this,
1523 ctxt));
1524 }
1525 set_value (lhs_region, sval, ctxt);
1526 }
1527
1528 if (gimple_call_internal_p (call))
1529 if (const known_function *kf
1530 = get_known_function (gimple_call_internal_fn (call)))
1531 {
1532 kf->impl_call_pre (cd);
1533 return false; /* No further side effects. */
1534 }
1535
1536 if (!callee_fndecl)
1537 return true; /* Unknown side effects. */
1538
1539 if (const known_function *kf = get_known_function (callee_fndecl, cd))
1540 {
1541 kf->impl_call_pre (cd);
1542 return false; /* No further side effects. */
1543 }
1544
1545 const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
1546 if (callee_fndecl_flags & (ECF_CONST(1 << 0) | ECF_PURE(1 << 1)))
1547 return false; /* No side effects. */
1548
1549 if (fndecl_built_in_p (callee_fndecl))
1550 return true; /* Unknown side effects. */
1551
1552 if (!fndecl_has_gimple_body_p (callee_fndecl))
1553 return true; /* Unknown side effects. */
1554
1555 return false; /* No side effects. */
1556}
1557
1558/* Update this model for the CALL stmt, using CTXT to report any
1559 diagnostics - the second half.
1560
1561 Updates to the region_model that should be made *after* sm-states
1562 are updated are done here; other updates to the region_model are done
1563 in region_model::on_call_pre.
1564
1565 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1566 to purge state. */
1567
1568void
1569region_model::on_call_post (const gcall *call,
1570 bool unknown_side_effects,
1571 region_model_context *ctxt)
1572{
1573 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1574 {
1575 call_details cd (call, this, ctxt);
1576 if (const known_function *kf = get_known_function (callee_fndecl, cd))
1577 {
1578 kf->impl_call_post (cd);
1579 return;
1580 }
1581 /* Was this fndecl referenced by
1582 __attribute__((malloc(FOO)))? */
1583 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)((contains_struct_check ((callee_fndecl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1583, __FUNCTION__))->decl_common.attributes)
))
1584 {
1585 impl_deallocation_call (cd);
1586 return;
1587 }
1588 }
1589
1590 if (unknown_side_effects)
1591 handle_unrecognized_call (call, ctxt);
1592}
1593
1594/* Purge state involving SVAL from this region_model, using CTXT
1595 (if non-NULL) to purge other state in a program_state.
1596
1597 For example, if we're at the def-stmt of an SSA name, then we need to
1598 purge any state for svalues that involve that SSA name. This avoids
1599 false positives in loops, since a symbolic value referring to the
1600 SSA name will be referring to the previous value of that SSA name.
1601
1602 For example, in:
1603 while ((e = hashmap_iter_next(&iter))) {
1604 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1605 free (e_strbuf->value);
1606 }
1607 at the def-stmt of e_8:
1608 e_8 = hashmap_iter_next (&iter);
1609 we should purge the "freed" state of:
1610 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1611 which is the "e_strbuf->value" value from the previous iteration,
1612 or we will erroneously report a double-free - the "e_8" within it
1613 refers to the previous value. */
1614
1615void
1616region_model::purge_state_involving (const svalue *sval,
1617 region_model_context *ctxt)
1618{
1619 if (!sval->can_have_associated_state_p ())
1620 return;
1621 m_store.purge_state_involving (sval, m_mgr);
1622 m_constraints->purge_state_involving (sval);
1623 m_dynamic_extents.purge_state_involving (sval);
1624 if (ctxt)
1625 ctxt->purge_state_involving (sval);
1626}
1627
1628/* A pending_note subclass for adding a note about an
1629 __attribute__((access, ...)) to a diagnostic. */
1630
1631class reason_attr_access : public pending_note_subclass<reason_attr_access>
1632{
1633public:
1634 reason_attr_access (tree callee_fndecl, const attr_access &access)
1635 : m_callee_fndecl (callee_fndecl),
1636 m_ptr_argno (access.ptrarg),
1637 m_access_str (TREE_STRING_POINTER (access.to_external_string ())((const char *)((tree_check ((access.to_external_string ()), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1637, __FUNCTION__, (STRING_CST)))->string.str))
)
1638 {
1639 }
1640
1641 const char *get_kind () const final override { return "reason_attr_access"; }
1642
1643 void emit () const final override
1644 {
1645 inform (DECL_SOURCE_LOCATION (m_callee_fndecl)((contains_struct_check ((m_callee_fndecl), (TS_DECL_MINIMAL)
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1645, __FUNCTION__))->decl_minimal.locus)
,
1646 "parameter %i of %qD marked with attribute %qs",
1647 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
1648 }
1649
1650 bool operator== (const reason_attr_access &other) const
1651 {
1652 return (m_callee_fndecl == other.m_callee_fndecl
1653 && m_ptr_argno == other.m_ptr_argno
1654 && !strcmp (m_access_str, other.m_access_str));
1655 }
1656
1657private:
1658 tree m_callee_fndecl;
1659 unsigned m_ptr_argno;
1660 const char *m_access_str;
1661};
1662
1663/* Check CALL a call to external function CALLEE_FNDECL based on
1664 any __attribute__ ((access, ....) on the latter, complaining to
1665 CTXT about any issues.
1666
1667 Currently we merely call check_region_for_write on any regions
1668 pointed to by arguments marked with a "write_only" or "read_write"
1669 attribute. */
1670
1671void
1672region_model::
1673check_external_function_for_access_attr (const gcall *call,
1674 tree callee_fndecl,
1675 region_model_context *ctxt) const
1676{
1677 gcc_assert (call)((void)(!(call) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1677, __FUNCTION__), 0 : 0))
;
1678 gcc_assert (callee_fndecl)((void)(!(callee_fndecl) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1678, __FUNCTION__), 0 : 0))
;
1679 gcc_assert (ctxt)((void)(!(ctxt) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1679, __FUNCTION__), 0 : 0))
;
1680
1681 tree fntype = TREE_TYPE (callee_fndecl)((contains_struct_check ((callee_fndecl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1681, __FUNCTION__))->typed.type)
;
1682 if (!fntype)
1683 return;
1684
1685 if (!TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1685, __FUNCTION__))->type_common.attributes)
)
1686 return;
1687
1688 /* Initialize a map of attribute access specifications for arguments
1689 to the function call. */
1690 rdwr_map rdwr_idx;
1691 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1691, __FUNCTION__))->type_common.attributes)
);
1692
1693 unsigned argno = 0;
1694
1695 for (tree iter = TYPE_ARG_TYPES (fntype)((tree_check2 ((fntype), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1695, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
; iter;
1696 iter = TREE_CHAIN (iter)((contains_struct_check ((iter), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1696, __FUNCTION__))->common.chain)
, ++argno)
1697 {
1698 const attr_access* access = rdwr_idx.get (argno);
1699 if (!access)
1700 continue;
1701
1702 /* Ignore any duplicate entry in the map for the size argument. */
1703 if (access->ptrarg != argno)
1704 continue;
1705
1706 if (access->mode == access_write_only
1707 || access->mode == access_read_write)
1708 {
1709 /* Subclass of decorated_region_model_context that
1710 adds a note about the attr access to any saved diagnostics. */
1711 class annotating_ctxt : public note_adding_context
1712 {
1713 public:
1714 annotating_ctxt (tree callee_fndecl,
1715 const attr_access &access,
1716 region_model_context *ctxt)
1717 : note_adding_context (ctxt),
1718 m_callee_fndecl (callee_fndecl),
1719 m_access (access)
1720 {
1721 }
1722 std::unique_ptr<pending_note> make_note () final override
1723 {
1724 return make_unique<reason_attr_access>
1725 (m_callee_fndecl, m_access);
1726 }
1727 private:
1728 tree m_callee_fndecl;
1729 const attr_access &m_access;
1730 };
1731
1732 /* Use this ctxt below so that any diagnostics get the
1733 note added to them. */
1734 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
1735
1736 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
1737 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
1738 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
1739 check_region_for_write (reg, &my_ctxt);
1740 /* We don't use the size arg for now. */
1741 }
1742 }
1743}
1744
1745/* Handle a call CALL to a function with unknown behavior.
1746
1747 Traverse the regions in this model, determining what regions are
1748 reachable from pointer arguments to CALL and from global variables,
1749 recursively.
1750
1751 Set all reachable regions to new unknown values and purge sm-state
1752 from their values, and from values that point to them. */
1753
1754void
1755region_model::handle_unrecognized_call (const gcall *call,
1756 region_model_context *ctxt)
1757{
1758 tree fndecl = get_fndecl_for_call (call, ctxt);
1759
1760 if (fndecl && ctxt)
1761 check_external_function_for_access_attr (call, fndecl, ctxt);
1762
1763 reachable_regions reachable_regs (this);
1764
1765 /* Determine the reachable regions and their mutability. */
1766 {
1767 /* Add globals and regions that already escaped in previous
1768 unknown calls. */
1769 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1770 &reachable_regs);
1771
1772 /* Params that are pointers. */
1773 tree iter_param_types = NULL_TREE(tree) nullptr;
1774 if (fndecl)
1775 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl))((tree_check2 ((((contains_struct_check ((fndecl), (TS_TYPED)
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1775, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1775, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
1776 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1777 {
1778 /* Track expected param type, where available. */
1779 tree param_type = NULL_TREE(tree) nullptr;
1780 if (iter_param_types)
1781 {
1782 param_type = TREE_VALUE (iter_param_types)((tree_check ((iter_param_types), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1782, __FUNCTION__, (TREE_LIST)))->list.value)
;
1783 gcc_assert (param_type)((void)(!(param_type) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1783, __FUNCTION__), 0 : 0))
;
1784 iter_param_types = TREE_CHAIN (iter_param_types)((contains_struct_check ((iter_param_types), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1784, __FUNCTION__))->common.chain)
;
1785 }
1786
1787 tree parm = gimple_call_arg (call, arg_idx);
1788 const svalue *parm_sval = get_rvalue (parm, ctxt);
1789 reachable_regs.handle_parm (parm_sval, param_type);
1790 }
1791 }
1792
1793 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULLnullptr;
1794
1795 /* Purge sm-state for the svalues that were reachable,
1796 both in non-mutable and mutable form. */
1797 for (svalue_set::iterator iter
1798 = reachable_regs.begin_reachable_svals ();
1799 iter != reachable_regs.end_reachable_svals (); ++iter)
1800 {
1801 const svalue *sval = (*iter);
1802 if (ctxt)
1803 ctxt->on_unknown_change (sval, false);
1804 }
1805 for (svalue_set::iterator iter
1806 = reachable_regs.begin_mutable_svals ();
1807 iter != reachable_regs.end_mutable_svals (); ++iter)
1808 {
1809 const svalue *sval = (*iter);
1810 if (ctxt)
1811 ctxt->on_unknown_change (sval, true);
1812 if (uncertainty)
1813 uncertainty->on_mutable_sval_at_unknown_call (sval);
1814 }
1815
1816 /* Mark any clusters that have escaped. */
1817 reachable_regs.mark_escaped_clusters (ctxt);
1818
1819 /* Update bindings for all clusters that have escaped, whether above,
1820 or previously. */
1821 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
1822 conjured_purge (this, ctxt));
1823
1824 /* Purge dynamic extents from any regions that have escaped mutably:
1825 realloc could have been called on them. */
1826 for (hash_set<const region *>::iterator
1827 iter = reachable_regs.begin_mutable_base_regs ();
1828 iter != reachable_regs.end_mutable_base_regs ();
1829 ++iter)
1830 {
1831 const region *base_reg = (*iter);
1832 unset_dynamic_extents (base_reg);
1833 }
1834}
1835
1836/* Traverse the regions in this model, determining what regions are
1837 reachable from the store and populating *OUT.
1838
1839 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1840 for reachability (for handling return values from functions when
1841 analyzing return of the only function on the stack).
1842
1843 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1844 within it as being maybe-bound as additional "roots" for reachability.
1845
1846 Find svalues that haven't leaked. */
1847
1848void
1849region_model::get_reachable_svalues (svalue_set *out,
1850 const svalue *extra_sval,
1851 const uncertainty_t *uncertainty)
1852{
1853 reachable_regions reachable_regs (this);
1854
1855 /* Add globals and regions that already escaped in previous
1856 unknown calls. */
1857 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1858 &reachable_regs);
1859
1860 if (extra_sval)
1861 reachable_regs.handle_sval (extra_sval);
1862
1863 if (uncertainty)
1864 for (uncertainty_t::iterator iter
1865 = uncertainty->begin_maybe_bound_svals ();
1866 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1867 reachable_regs.handle_sval (*iter);
1868
1869 /* Get regions for locals that have explicitly bound values. */
1870 for (store::cluster_map_t::iterator iter = m_store.begin ();
1871 iter != m_store.end (); ++iter)
1872 {
1873 const region *base_reg = (*iter).first;
1874 if (const region *parent = base_reg->get_parent_region ())
1875 if (parent->get_kind () == RK_FRAME)
1876 reachable_regs.add (base_reg, false);
1877 }
1878
1879 /* Populate *OUT based on the values that were reachable. */
1880 for (svalue_set::iterator iter
1881 = reachable_regs.begin_reachable_svals ();
1882 iter != reachable_regs.end_reachable_svals (); ++iter)
1883 out->add (*iter);
1884}
1885
1886/* Update this model for the RETURN_STMT, using CTXT to report any
1887 diagnostics. */
1888
1889void
1890region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1891{
1892 tree callee = get_current_function ()->decl;
1893 tree lhs = DECL_RESULT (callee)((tree_check ((callee), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1893, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
;
1894 tree rhs = gimple_return_retval (return_stmt);
1895
1896 if (lhs && rhs)
1897 {
1898 const svalue *sval = get_rvalue (rhs, ctxt);
1899 const region *ret_reg = get_lvalue (lhs, ctxt);
1900 set_value (ret_reg, sval, ctxt);
1901 }
1902}
1903
1904/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1905 ENODE, using CTXT to report any diagnostics.
1906
1907 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1908 0), as opposed to any second return due to longjmp/sigsetjmp. */
1909
1910void
1911region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1912 region_model_context *ctxt)
1913{
1914 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1915 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1916 ctxt);
1917
1918 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1919 region. */
1920 if (buf_reg)
1921 {
1922 setjmp_record r (enode, call);
1923 const svalue *sval
1924 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1925 set_value (buf_reg, sval, ctxt);
1926 }
1927
1928 /* Direct calls to setjmp return 0. */
1929 if (tree lhs = gimple_call_lhs (call))
1930 {
1931 const svalue *new_sval
1932 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1932, __FUNCTION__))->typed.type)
, 0);
1933 const region *lhs_reg = get_lvalue (lhs, ctxt);
1934 set_value (lhs_reg, new_sval, ctxt);
1935 }
1936}
1937
1938/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1939 to a "setjmp" at SETJMP_CALL where the final stack depth should be
1940 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1941 done, and should be done by the caller. */
1942
1943void
1944region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1945 int setjmp_stack_depth, region_model_context *ctxt)
1946{
1947 /* Evaluate the val, using the frame of the "longjmp". */
1948 tree fake_retval = gimple_call_arg (longjmp_call, 1);
1949 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1950
1951 /* Pop any frames until we reach the stack depth of the function where
1952 setjmp was called. */
1953 gcc_assert (get_stack_depth () >= setjmp_stack_depth)((void)(!(get_stack_depth () >= setjmp_stack_depth) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1953, __FUNCTION__), 0 : 0))
;
1954 while (get_stack_depth () > setjmp_stack_depth)
1955 pop_frame (NULLnullptr, NULLnullptr, ctxt, false);
1956
1957 gcc_assert (get_stack_depth () == setjmp_stack_depth)((void)(!(get_stack_depth () == setjmp_stack_depth) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1957, __FUNCTION__), 0 : 0))
;
1958
1959 /* Assign to LHS of "setjmp" in new_state. */
1960 if (tree lhs = gimple_call_lhs (setjmp_call))
1961 {
1962 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1963 const svalue *zero_sval
1964 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval)((contains_struct_check ((fake_retval), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1964, __FUNCTION__))->typed.type)
, 0);
1965 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
1966 /* If we have 0, use 1. */
1967 if (eq_zero.is_true ())
1968 {
1969 const svalue *one_sval
1970 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval)((contains_struct_check ((fake_retval), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 1970, __FUNCTION__))->typed.type)
, 1);
1971 fake_retval_sval = one_sval;
1972 }
1973 else
1974 {
1975 /* Otherwise note that the value is nonzero. */
1976 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
1977 }
1978
1979 /* Decorate the return value from setjmp as being unmergeable,
1980 so that we don't attempt to merge states with it as zero
1981 with states in which it's nonzero, leading to a clean distinction
1982 in the exploded_graph betweeen the first return and the second
1983 return. */
1984 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
1985
1986 const region *lhs_reg = get_lvalue (lhs, ctxt);
1987 set_value (lhs_reg, fake_retval_sval, ctxt);
1988 }
1989}
1990
1991/* Update this region_model for a phi stmt of the form
1992 LHS = PHI <...RHS...>.
1993 where RHS is for the appropriate edge.
1994 Get state from OLD_STATE so that all of the phi stmts for a basic block
1995 are effectively handled simultaneously. */
1996
1997void
1998region_model::handle_phi (const gphi *phi,
1999 tree lhs, tree rhs,
2000 const region_model &old_state,
2001 region_model_context *ctxt)
2002{
2003 /* For now, don't bother tracking the .MEM SSA names. */
2004 if (tree var = SSA_NAME_VAR (lhs)((tree_check ((lhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2004, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((lhs)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (lhs)->ssa_name
.var)
)
2005 if (TREE_CODE (var)((enum tree_code) (var)->base.code) == VAR_DECL)
2006 if (VAR_DECL_IS_VIRTUAL_OPERAND (var)((tree_check ((var), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2006, __FUNCTION__, (VAR_DECL)))->base.u.bits.saturating_flag
)
)
2007 return;
2008
2009 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2010 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2011
2012 set_value (dst_reg, src_sval, ctxt);
2013
2014 if (ctxt)
2015 ctxt->on_phi (phi, rhs);
2016}
2017
2018/* Implementation of region_model::get_lvalue; the latter adds type-checking.
2019
2020 Get the id of the region for PV within this region_model,
2021 emitting any diagnostics to CTXT. */
2022
2023const region *
2024region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2025{
2026 tree expr = pv.m_tree;
2027
2028 gcc_assert (expr)((void)(!(expr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2028, __FUNCTION__), 0 : 0))
;
2029
2030 switch (TREE_CODE (expr)((enum tree_code) (expr)->base.code))
2031 {
2032 default:
2033 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2034 dump_location_t ());
2035
2036 case ARRAY_REF:
2037 {
2038 tree array = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2038, __FUNCTION__)))))
;
2039 tree index = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2039, __FUNCTION__)))))
;
2040
2041 const region *array_reg = get_lvalue (array, ctxt);
2042 const svalue *index_sval = get_rvalue (index, ctxt);
2043 return m_mgr->get_element_region (array_reg,
2044 TREE_TYPE (TREE_TYPE (array))((contains_struct_check ((((contains_struct_check ((array), (
TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2044, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2044, __FUNCTION__))->typed.type)
,
2045 index_sval);
2046 }
2047 break;
2048
2049 case BIT_FIELD_REF:
2050 {
2051 tree inner_expr = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2051, __FUNCTION__)))))
;
2052 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2053 tree num_bits = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2053, __FUNCTION__)))))
;
2054 tree first_bit_offset = TREE_OPERAND (expr, 2)(*((const_cast<tree*> (tree_operand_check ((expr), (2),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2054, __FUNCTION__)))))
;
2055 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST)((void)(!(((enum tree_code) (num_bits)->base.code) == INTEGER_CST
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2055, __FUNCTION__), 0 : 0))
;
2056 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST)((void)(!(((enum tree_code) (first_bit_offset)->base.code)
== INTEGER_CST) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2056, __FUNCTION__), 0 : 0))
;
2057 bit_range bits (TREE_INT_CST_LOW (first_bit_offset)((unsigned long) (*tree_int_cst_elt_check ((first_bit_offset)
, (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2057, __FUNCTION__)))
,
2058 TREE_INT_CST_LOW (num_bits)((unsigned long) (*tree_int_cst_elt_check ((num_bits), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2058, __FUNCTION__)))
);
2059 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2059, __FUNCTION__))->typed.type)
, bits);
2060 }
2061 break;
2062
2063 case MEM_REF:
2064 {
2065 tree ptr = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2065, __FUNCTION__)))))
;
2066 tree offset = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2066, __FUNCTION__)))))
;
2067 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2068 const svalue *offset_sval = get_rvalue (offset, ctxt);
2069 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2070 return m_mgr->get_offset_region (star_ptr,
2071 TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2071, __FUNCTION__))->typed.type)
,
2072 offset_sval);
2073 }
2074 break;
2075
2076 case FUNCTION_DECL:
2077 return m_mgr->get_region_for_fndecl (expr);
2078
2079 case LABEL_DECL:
2080 return m_mgr->get_region_for_label (expr);
2081
2082 case VAR_DECL:
2083 /* Handle globals. */
2084 if (is_global_var (expr))
2085 return m_mgr->get_region_for_global (expr);
2086
2087 /* Fall through. */
2088
2089 case SSA_NAME:
2090 case PARM_DECL:
2091 case RESULT_DECL:
2092 {
2093 gcc_assert (TREE_CODE (expr) == SSA_NAME((void)(!(((enum tree_code) (expr)->base.code) == SSA_NAME
|| ((enum tree_code) (expr)->base.code) == PARM_DECL || (
(enum tree_code) (expr)->base.code) == VAR_DECL || ((enum tree_code
) (expr)->base.code) == RESULT_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2096, __FUNCTION__), 0 : 0))
2094 || TREE_CODE (expr) == PARM_DECL((void)(!(((enum tree_code) (expr)->base.code) == SSA_NAME
|| ((enum tree_code) (expr)->base.code) == PARM_DECL || (
(enum tree_code) (expr)->base.code) == VAR_DECL || ((enum tree_code
) (expr)->base.code) == RESULT_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2096, __FUNCTION__), 0 : 0))
2095 || TREE_CODE (expr) == VAR_DECL((void)(!(((enum tree_code) (expr)->base.code) == SSA_NAME
|| ((enum tree_code) (expr)->base.code) == PARM_DECL || (
(enum tree_code) (expr)->base.code) == VAR_DECL || ((enum tree_code
) (expr)->base.code) == RESULT_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2096, __FUNCTION__), 0 : 0))
2096 || TREE_CODE (expr) == RESULT_DECL)((void)(!(((enum tree_code) (expr)->base.code) == SSA_NAME
|| ((enum tree_code) (expr)->base.code) == PARM_DECL || (
(enum tree_code) (expr)->base.code) == VAR_DECL || ((enum tree_code
) (expr)->base.code) == RESULT_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2096, __FUNCTION__), 0 : 0))
;
2097
2098 int stack_index = pv.m_stack_depth;
2099 const frame_region *frame = get_frame_at_index (stack_index);
2100 gcc_assert (frame)((void)(!(frame) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2100, __FUNCTION__), 0 : 0))
;
2101 return frame->get_region_for_local (m_mgr, expr, ctxt);
2102 }
2103
2104 case COMPONENT_REF:
2105 {
2106 /* obj.field */
2107 tree obj = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2107, __FUNCTION__)))))
;
2108 tree field = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2108, __FUNCTION__)))))
;
2109 const region *obj_reg = get_lvalue (obj, ctxt);
2110 return m_mgr->get_field_region (obj_reg, field);
2111 }
2112 break;
2113
2114 case STRING_CST:
2115 return m_mgr->get_region_for_string (expr);
2116 }
2117}
2118
2119/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2120
2121static void
2122assert_compat_types (tree src_type, tree dst_type)
2123{
2124 if (src_type && dst_type && !VOID_TYPE_P (dst_type)(((enum tree_code) (dst_type)->base.code) == VOID_TYPE))
2125 {
2126#if CHECKING_P1
2127 if (!(useless_type_conversion_p (src_type, dst_type)))
2128 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2129#endif
2130 }
2131}
2132
2133/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2134
2135bool
2136compat_types_p (tree src_type, tree dst_type)
2137{
2138 if (src_type && dst_type && !VOID_TYPE_P (dst_type)(((enum tree_code) (dst_type)->base.code) == VOID_TYPE))
2139 if (!(useless_type_conversion_p (src_type, dst_type)))
2140 return false;
2141 return true;
2142}
2143
2144/* Get the region for PV within this region_model,
2145 emitting any diagnostics to CTXT. */
2146
2147const region *
2148region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2149{
2150 if (pv.m_tree == NULL_TREE(tree) nullptr)
2151 return NULLnullptr;
2152
2153 const region *result_reg = get_lvalue_1 (pv, ctxt);
2154 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree)((contains_struct_check ((pv.m_tree), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2154, __FUNCTION__))->typed.type)
);
2155 return result_reg;
2156}
2157
2158/* Get the region for EXPR within this region_model (assuming the most
2159 recent stack frame if it's a local). */
2160
2161const region *
2162region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2163{
2164 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2165}
2166
2167/* Implementation of region_model::get_rvalue; the latter adds type-checking.
2168
2169 Get the value of PV within this region_model,
2170 emitting any diagnostics to CTXT. */
2171
2172const svalue *
2173region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2174{
2175 gcc_assert (pv.m_tree)((void)(!(pv.m_tree) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2175, __FUNCTION__), 0 : 0))
;
2176
2177 switch (TREE_CODE (pv.m_tree)((enum tree_code) (pv.m_tree)->base.code))
2178 {
2179 default:
2180 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree)((contains_struct_check ((pv.m_tree), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2180, __FUNCTION__))->typed.type)
);
2181
2182 case ADDR_EXPR:
2183 {
2184 /* "&EXPR". */
2185 tree expr = pv.m_tree;
2186 tree op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2186, __FUNCTION__)))))
;
2187 const region *expr_reg = get_lvalue (op0, ctxt);
2188 return m_mgr->get_ptr_svalue (TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2188, __FUNCTION__))->typed.type)
, expr_reg);
2189 }
2190 break;
2191
2192 case BIT_FIELD_REF:
2193 {
2194 tree expr = pv.m_tree;
2195 tree op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2195, __FUNCTION__)))))
;
2196 const region *reg = get_lvalue (op0, ctxt);
2197 tree num_bits = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2197, __FUNCTION__)))))
;
2198 tree first_bit_offset = TREE_OPERAND (expr, 2)(*((const_cast<tree*> (tree_operand_check ((expr), (2),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2198, __FUNCTION__)))))
;
2199 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST)((void)(!(((enum tree_code) (num_bits)->base.code) == INTEGER_CST
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2199, __FUNCTION__), 0 : 0))
;
2200 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST)((void)(!(((enum tree_code) (first_bit_offset)->base.code)
== INTEGER_CST) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2200, __FUNCTION__), 0 : 0))
;
2201 bit_range bits (TREE_INT_CST_LOW (first_bit_offset)((unsigned long) (*tree_int_cst_elt_check ((first_bit_offset)
, (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2201, __FUNCTION__)))
,
2202 TREE_INT_CST_LOW (num_bits)((unsigned long) (*tree_int_cst_elt_check ((num_bits), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2202, __FUNCTION__)))
);
2203 return get_rvalue_for_bits (TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2203, __FUNCTION__))->typed.type)
, reg, bits, ctxt);
2204 }
2205
2206 case VAR_DECL:
2207 if (DECL_HARD_REGISTER (pv.m_tree)((tree_check ((pv.m_tree), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2207, __FUNCTION__, (VAR_DECL)))->decl_with_vis.hard_register
)
)
2208 {
2209 /* If it has a hard register, it doesn't have a memory region
2210 and can't be referred to as an lvalue. */
2211 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree)((contains_struct_check ((pv.m_tree), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2211, __FUNCTION__))->typed.type)
);
2212 }
2213 /* Fall through. */
2214 case PARM_DECL:
2215 case SSA_NAME:
2216 case RESULT_DECL:
2217 case ARRAY_REF:
2218 {
2219 const region *reg = get_lvalue (pv, ctxt);
2220 return get_store_value (reg, ctxt);
2221 }
2222
2223 case REALPART_EXPR:
2224 case IMAGPART_EXPR:
2225 case VIEW_CONVERT_EXPR:
2226 {
2227 tree expr = pv.m_tree;
2228 tree arg = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2228, __FUNCTION__)))))
;
2229 const svalue *arg_sval = get_rvalue (arg, ctxt);
2230 const svalue *sval_unaryop
2231 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2231, __FUNCTION__))->typed.type)
, TREE_CODE (expr)((enum tree_code) (expr)->base.code),
2232 arg_sval);
2233 return sval_unaryop;
2234 };
2235
2236 case INTEGER_CST:
2237 case REAL_CST:
2238 case COMPLEX_CST:
2239 case VECTOR_CST:
2240 case STRING_CST:
2241 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2242
2243 case POINTER_PLUS_EXPR:
2244 {
2245 tree expr = pv.m_tree;
2246 tree ptr = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2246, __FUNCTION__)))))
;
2247 tree offset = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2247, __FUNCTION__)))))
;
2248 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2249 const svalue *offset_sval = get_rvalue (offset, ctxt);
2250 const svalue *sval_binop
2251 = m_mgr->get_or_create_binop (TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2251, __FUNCTION__))->typed.type)
, POINTER_PLUS_EXPR,
2252 ptr_sval, offset_sval);
2253 return sval_binop;
2254 }
2255
2256 /* Binary ops. */
2257 case PLUS_EXPR:
2258 case MULT_EXPR:
2259 case BIT_AND_EXPR:
2260 case BIT_IOR_EXPR:
2261 case BIT_XOR_EXPR:
2262 {
2263 tree expr = pv.m_tree;
2264 tree arg0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2264, __FUNCTION__)))))
;
2265 tree arg1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2265, __FUNCTION__)))))
;
2266 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2267 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2268 const svalue *sval_binop
2269 = m_mgr->get_or_create_binop (TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2269, __FUNCTION__))->typed.type)
, TREE_CODE (expr)((enum tree_code) (expr)->base.code),
2270 arg0_sval, arg1_sval);
2271 return sval_binop;
2272 }
2273
2274 case COMPONENT_REF:
2275 case MEM_REF:
2276 {
2277 const region *ref_reg = get_lvalue (pv, ctxt);
2278 return get_store_value (ref_reg, ctxt);
2279 }
2280 case OBJ_TYPE_REF:
2281 {
2282 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree)(*((const_cast<tree*> (tree_operand_check (((tree_check
((pv.m_tree), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2282, __FUNCTION__, (OBJ_TYPE_REF)))), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2282, __FUNCTION__)))))
;
2283 return get_rvalue (expr, ctxt);
2284 }
2285 }
2286}
2287
2288/* Get the value of PV within this region_model,
2289 emitting any diagnostics to CTXT. */
2290
2291const svalue *
2292region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
2293{
2294 if (pv.m_tree == NULL_TREE(tree) nullptr)
2295 return NULLnullptr;
2296
2297 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
2298
2299 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree)((contains_struct_check ((pv.m_tree), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2299, __FUNCTION__))->typed.type)
);
2300
2301 result_sval = check_for_poison (result_sval, pv.m_tree, NULLnullptr, ctxt);
2302
2303 return result_sval;
2304}
2305
2306/* Get the value of EXPR within this region_model (assuming the most
2307 recent stack frame if it's a local). */
2308
2309const svalue *
2310region_model::get_rvalue (tree expr, region_model_context *ctxt) const
2311{
2312 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2313}
2314
2315/* Return true if this model is on a path with "main" as the entrypoint
2316 (as opposed to one in which we're merely analyzing a subset of the
2317 path through the code). */
2318
2319bool
2320region_model::called_from_main_p () const
2321{
/* With no frames at all we are not on a path from any entrypoint.  */
2322 if (!m_current_frame)
2323 return false;
2324 /* Determine if the oldest stack frame in this model is for "main". */
2325 const frame_region *frame0 = get_frame_at_index (0);
2326 gcc_assert (frame0)((void)(!(frame0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2326, __FUNCTION__), 0 : 0))
;
/* Compare the outermost frame's function name against "main".  */
2327 return id_equal (DECL_NAME (frame0->get_function ()->decl)((contains_struct_check ((frame0->get_function ()->decl
), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2327, __FUNCTION__))->decl_minimal.name)
, "main");
2328}
2329
2330/* Subroutine of region_model::get_store_value for when REG is (or is within)
2331 a global variable that hasn't been touched since the start of this path
2332 (or was implicitly touched due to a call to an unknown function). */
2333
2334const svalue *
2335region_model::get_initial_value_for_global (const region *reg) const
2336{
2337 /* Get the decl that REG is for (or is within). */
/* The caller guarantees REG's base region is a decl_region (asserted
   below).  */
2338 const decl_region *base_reg
2339 = reg->get_base_region ()->dyn_cast_decl_region ();
2340 gcc_assert (base_reg)((void)(!(base_reg) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2340, __FUNCTION__), 0 : 0))
;
2341 tree decl = base_reg->get_decl ();
2342
2343 /* Special-case: to avoid having to explicitly update all previously
2344 untracked globals when calling an unknown fn, they implicitly have
2345 an unknown value if an unknown call has occurred, unless this is
2346 static to-this-TU and hasn't escaped. Globals that have escaped
2347 are explicitly tracked, so we shouldn't hit this case for them. */
2348 if (m_store.called_unknown_fn_p ()
2349 && TREE_PUBLIC (decl)((decl)->base.public_flag)
2350 && !TREE_READONLY (decl)((non_type_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2350, __FUNCTION__))->base.readonly_flag)
)
2351 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2352
2353 /* If we are on a path from the entrypoint from "main" and we have a
2354 global decl defined in this TU that hasn't been touched yet, then
2355 the initial value of REG can be taken from the initialization value
2356 of the decl. */
2357 if (called_from_main_p () || TREE_READONLY (decl)((non_type_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2357, __FUNCTION__))->base.readonly_flag)
)
2358 {
2359 /* Attempt to get the initializer value for base_reg. */
2360 if (const svalue *base_reg_init
2361 = base_reg->get_svalue_for_initializer (m_mgr))
2362 {
2363 if (reg == base_reg)
2364 return base_reg_init;
2365 else
2366 {
2367 /* Get the value for REG within base_reg_init. */
2368 binding_cluster c (base_reg);
2369 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
2370 const svalue *sval
2371 = c.get_any_binding (m_mgr->get_store_manager (), reg);
/* Cast the extracted binding to REG's own type, when known.  */
2372 if (sval)
2373 {
2374 if (reg->get_type ())
2375 sval = m_mgr->get_or_create_cast (reg->get_type (),
2376 sval);
2377 return sval;
2378 }
2379 }
2380 }
2381 }
2382
2383 /* Otherwise, return INIT_VAL(REG). */
2384 return m_mgr->get_or_create_initial_value (reg);
2385}
2386
2387/* Get a value for REG, looking it up in the store, or otherwise falling
2388 back to "initial" or "unknown" values.
2389 Use CTXT to report any warnings associated with reading from REG. */
2390
2391const svalue *
2392region_model::get_store_value (const region *reg,
2393 region_model_context *ctxt) const
2394{
2395 /* Getting the value of an empty region gives an unknown_svalue. */
2396 if (reg->empty_p ())
2397 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2398
2399 check_region_for_read (reg, ctxt);
2400
2401 /* Special-case: handle var_decls in the constant pool. */
2402 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2403 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2404 return sval;
2405
/* Try an explicit binding in the store; cast to REG's type when known.  */
2406 const svalue *sval
2407 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2408 if (sval)
2409 {
2410 if (reg->get_type ())
2411 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2412 return sval;
2413 }
2414
2415 /* Special-case: read at a constant index within a STRING_CST. */
2416 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2417 if (tree byte_offset_cst
2418 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2419 if (const string_region *str_reg
2420 = reg->get_parent_region ()->dyn_cast_string_region ())
2421 {
2422 tree string_cst = str_reg->get_string_cst ();
2423 if (const svalue *char_sval
2424 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2425 byte_offset_cst))
2426 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2427 }
2428
2429 /* Special-case: read the initial char of a STRING_CST. */
2430 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2431 if (const string_region *str_reg
2432 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2433 {
2434 tree string_cst = str_reg->get_string_cst ();
2435 tree byte_offset_cst = build_int_cst (integer_type_nodeinteger_types[itk_int], 0);
2436 if (const svalue *char_sval
2437 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2438 byte_offset_cst))
2439 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2440 }
2441
2442 /* Otherwise we implicitly have the initial value of the region
2443 (if the cluster had been touched, binding_cluster::get_any_binding,
2444 would have returned UNKNOWN, and we would already have returned
2445 that above). */
2446
2447 /* Handle globals. */
/* Untouched global: derive its value from the decl's initializer where
   possible.  */
2448 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2449 == RK_GLOBALS)
2450 return get_initial_value_for_global (reg);
2451
2452 return m_mgr->get_or_create_initial_value (reg);
2453}
2454
2455/* Return false if REG does not exist, true if it may do.
2456 This is for detecting regions within the stack that don't exist anymore
2457 after frames are popped. */
2458
2459bool
2460region_model::region_exists_p (const region *reg) const
2461{
2462 /* If within a stack frame, check that the stack frame is live. */
2463 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2464 {
2465 /* Check that the current frame is the enclosing frame, or is called
2466 by it. */
2467 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2468 iter_frame = iter_frame->get_calling_frame ())
2469 if (iter_frame == enclosing_frame)
2470 return true;
2471 return false;
2472 }
2473
2474 return true;
2475}
2476
2477/* Get a region for referencing PTR_SVAL, creating a region if need be, and
2478 potentially generating warnings via CTXT.
2479 PTR_SVAL must be of pointer type.
2480 PTR_TREE if non-NULL can be used when emitting diagnostics. */
2481
2482const region *
2483region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2484 region_model_context *ctxt) const
2485{
2486 gcc_assert (ptr_sval)((void)(!(ptr_sval) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2486, __FUNCTION__), 0 : 0))
;
2487 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()))((void)(!((((enum tree_code) (ptr_sval->get_type ())->base
.code) == POINTER_TYPE || ((enum tree_code) (ptr_sval->get_type
())->base.code) == REFERENCE_TYPE)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2487, __FUNCTION__), 0 : 0))
;
2488
2489 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2490 as a constraint. This suppresses false positives from
2491 -Wanalyzer-null-dereference for the case where we later have an
2492 if (PTR_SVAL) that would occur if we considered the false branch
2493 and transitioned the malloc state machine from start->null. */
2494 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2495 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2496 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2497
/* Dispatch on the kind of symbolic value being dereferenced.  */
2498 switch (ptr_sval->get_kind ())
2499 {
2500 default:
2501 break;
2502
2503 case SK_REGION:
2504 {
2505 const region_svalue *region_sval
2506 = as_a <const region_svalue *> (ptr_sval);
2507 return region_sval->get_pointee ();
2508 }
2509
2510 case SK_BINOP:
2511 {
2512 const binop_svalue *binop_sval
2513 = as_a <const binop_svalue *> (ptr_sval);
2514 switch (binop_sval->get_op ())
2515 {
2516 case POINTER_PLUS_EXPR:
2517 {
2518 /* If we have a symbolic value expressing pointer arithmentic,
2519 try to convert it to a suitable region. */
2520 const region *parent_region
2521 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE(tree) nullptr, ctxt);
2522 const svalue *offset = binop_sval->get_arg1 ();
2523 tree type= TREE_TYPE (ptr_sval->get_type ())((contains_struct_check ((ptr_sval->get_type ()), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2523, __FUNCTION__))->typed.type)
;
2524 return m_mgr->get_offset_region (parent_region, type, offset);
2525 }
2526 default:
2527 break;
2528 }
2529 }
2530 break;
2531
2532 case SK_POISONED:
2533 {
2534 if (ctxt)
2535 {
2536 tree ptr = get_representative_tree (ptr_sval);
2537 /* If we can't get a representative tree for PTR_SVAL
2538 (e.g. if it hasn't been bound into the store), then
2539 fall back on PTR_TREE, if non-NULL. */
2540 if (!ptr)
2541 ptr = ptr_tree;
2542 if (ptr)
2543 {
2544 const poisoned_svalue *poisoned_sval
2545 = as_a <const poisoned_svalue *> (ptr_sval);
2546 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2547 ctxt->warn (make_unique<poisoned_value_diagnostic>
2548 (ptr, pkind, NULLnullptr, NULLnullptr));
2549 }
2550 }
2551 }
2552 break;
2553 }
2554
/* Fallback: represent the pointee symbolically.  */
2555 return m_mgr->get_symbolic_region (ptr_sval);
2556}
2557
2558/* Attempt to get BITS within any value of REG, as TYPE.
2559 In particular, extract values from compound_svalues for the case
2560 where there's a concrete binding at BITS.
2561 Return an unknown svalue if we can't handle the given case.
2562 Use CTXT to report any warnings associated with reading from REG. */
2563
2564const svalue *
2565region_model::get_rvalue_for_bits (tree type,
2566 const region *reg,
2567 const bit_range &bits,
2568 region_model_context *ctxt) const
2569{
2570 const svalue *sval = get_store_value (reg, ctxt);
2571 return m_mgr->get_or_create_bits_within (type, bits, sval);
2572}
2573
2574/* A subclass of pending_diagnostic for complaining about writes to
2575 constant regions of memory. */
2576
2577class write_to_const_diagnostic
2578: public pending_diagnostic_subclass<write_to_const_diagnostic>
2579{
2580public:
/* REG: the const region written to; DECL: its decl, used in messages.  */
2581 write_to_const_diagnostic (const region *reg, tree decl)
2582 : m_reg (reg), m_decl (decl)
2583 {}
2584
2585 const char *get_kind () const final override
2586 {
2587 return "write_to_const_diagnostic";
2588 }
2589
/* Deduplication: two instances are the same if region and decl match.  */
2590 bool operator== (const write_to_const_diagnostic &other) const
2591 {
2592 return (m_reg == other.m_reg
2593 && m_decl == other.m_decl);
2594 }
2595
2596 int get_controlling_option () const final override
2597 {
2598 return OPT_Wanalyzer_write_to_const;
2599 }
2600
/* Phrase the warning according to the kind of region written to, and
   point at the decl's declaration on success.  */
2601 bool emit (rich_location *rich_loc) final override
2602 {
2603 auto_diagnostic_group d;
2604 bool warned;
2605 switch (m_reg->get_kind ())
2606 {
2607 default:
2608 warned = warning_at (rich_loc, get_controlling_option (),
2609 "write to %<const%> object %qE", m_decl);
2610 break;
2611 case RK_FUNCTION:
2612 warned = warning_at (rich_loc, get_controlling_option (),
2613 "write to function %qE", m_decl);
2614 break;
2615 case RK_LABEL:
2616 warned = warning_at (rich_loc, get_controlling_option (),
2617 "write to label %qE", m_decl);
2618 break;
2619 }
2620 if (warned)
2621 inform (DECL_SOURCE_LOCATION (m_decl)((contains_struct_check ((m_decl), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2621, __FUNCTION__))->decl_minimal.locus)
, "declared here");
2622 return warned;
2623 }
2624
2625 label_text describe_final_event (const evdesc::final_event &ev) final override
2626 {
2627 switch (m_reg->get_kind ())
2628 {
2629 default:
2630 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2631 case RK_FUNCTION:
2632 return ev.formatted_print ("write to function %qE here", m_decl);
2633 case RK_LABEL:
2634 return ev.formatted_print ("write to label %qE here", m_decl);
2635 }
2636 }
2637
2638private:
2639 const region *m_reg;
2640 tree m_decl;
2641};
2642
2643/* A subclass of pending_diagnostic for complaining about writes to
2644 string literals. */
2645
2646class write_to_string_literal_diagnostic
2647: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2648{
2649public:
2650 write_to_string_literal_diagnostic (const region *reg)
2651 : m_reg (reg)
2652 {}
2653
2654 const char *get_kind () const final override
2655 {
2656 return "write_to_string_literal_diagnostic";
2657 }
2658
2659 bool operator== (const write_to_string_literal_diagnostic &other) const
2660 {
2661 return m_reg == other.m_reg;
2662 }
2663
2664 int get_controlling_option () const final override
2665 {
2666 return OPT_Wanalyzer_write_to_string_literal;
2667 }
2668
2669 bool emit (rich_location *rich_loc) final override
2670 {
2671 return warning_at (rich_loc, get_controlling_option (),
2672 "write to string literal");
2673 /* Ideally we would show the location of the STRING_CST as well,
2674 but it is not available at this point. */
2675 }
2676
2677 label_text describe_final_event (const evdesc::final_event &ev) final override
2678 {
2679 return ev.formatted_print ("write to string literal here");
2680 }
2681
2682private:
2683 const region *m_reg;
2684};
2685
2686/* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */
2687
2688void
2689region_model::check_for_writable_region (const region* dest_reg,
2690 region_model_context *ctxt) const
2691{
2692 /* Fail gracefully if CTXT is NULL. */
2693 if (!ctxt)
2694 return;
2695
/* Classify by the outermost region containing DEST_REG.  */
2696 const region *base_reg = dest_reg->get_base_region ();
2697 switch (base_reg->get_kind ())
2698 {
2699 default:
2700 break;
2701 case RK_FUNCTION:
2702 {
2703 const function_region *func_reg = as_a <const function_region *> (base_reg);
2704 tree fndecl = func_reg->get_fndecl ();
2705 ctxt->warn (make_unique<write_to_const_diagnostic>
2706 (func_reg, fndecl));
2707 }
2708 break;
2709 case RK_LABEL:
2710 {
2711 const label_region *label_reg = as_a <const label_region *> (base_reg);
2712 tree label = label_reg->get_label ();
2713 ctxt->warn (make_unique<write_to_const_diagnostic>
2714 (label_reg, label));
2715 }
2716 break;
2717 case RK_DECL:
2718 {
2719 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2720 tree decl = decl_reg->get_decl ();
2721 /* Warn about writes to const globals.
2722 Don't warn for writes to const locals, and params in particular,
2723 since we would warn in push_frame when setting them up (e.g the
2724 "this" param is "T* const"). */
2725 if (TREE_READONLY (decl)((non_type_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2725, __FUNCTION__))->base.readonly_flag)
2726 && is_global_var (decl))
2727 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
2728 }
2729 break;
2730 case RK_STRING:
2731 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
2732 break;
2733 }
2734}
2735
2736/* Get the capacity of REG in bytes. */
2737
2738const svalue *
2739region_model::get_capacity (const region *reg) const
2740{
2741 switch (reg->get_kind ())
2742 {
2743 default:
2744 break;
2745 case RK_DECL:
2746 {
2747 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2748 tree decl = decl_reg->get_decl ();
/* For SSA names the capacity is the size of the name's type; otherwise
   use the decl's initializer size, when available.  */
2749 if (TREE_CODE (decl)((enum tree_code) (decl)->base.code) == SSA_NAME)
2750 {
2751 tree type = TREE_TYPE (decl)((contains_struct_check ((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2751, __FUNCTION__))->typed.type)
;
2752 tree size = TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2752, __FUNCTION__))->type_common.size)
;
2753 return get_rvalue (size, NULLnullptr);
2754 }
2755 else
2756 {
2757 tree size = decl_init_size (decl, false);
2758 if (size)
2759 return get_rvalue (size, NULLnullptr);
2760 }
2761 }
2762 break;
2763 case RK_SIZED:
2764 /* Look through sized regions to get at the capacity
2765 of the underlying regions. */
2766 return get_capacity (reg->get_parent_region ());
2767 }
2768
/* Fall back to any dynamically-recorded extent for REG.  */
2769 if (const svalue *recorded = get_dynamic_extents (reg))
2770 return recorded;
2771
/* The capacity is unknown.  */
2772 return m_mgr->get_or_create_unknown_svalue (sizetypesizetype_tab[(int) stk_sizetype]);
2773}
2774
2775/* Return the string size, including the 0-terminator, if SVAL is a
2776 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
2777
2778const svalue *
2779region_model::get_string_size (const svalue *sval) const
2780{
/* Only a constant svalue wrapping a STRING_CST has a knowable size.  */
2781 tree cst = sval->maybe_get_constant ();
2782 if (!cst || TREE_CODE (cst)((enum tree_code) (cst)->base.code) != STRING_CST)
2783 return m_mgr->get_or_create_unknown_svalue (size_type_nodeglobal_trees[TI_SIZE_TYPE]);
2784
2785 tree out = build_int_cst (size_type_nodeglobal_trees[TI_SIZE_TYPE], TREE_STRING_LENGTH (cst)((tree_check ((cst), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2785, __FUNCTION__, (STRING_CST)))->string.length)
);
2786 return m_mgr->get_or_create_constant_svalue (out);
2787}
2788
2789/* Return the string size, including the 0-terminator, if REG is a
2790 string_region. Otherwise, return an unknown_svalue. */
2791
2792const svalue *
2793region_model::get_string_size (const region *reg) const
2794{
/* Only string_regions have a knowable string size.  */
2795 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2796 if (!str_reg)
2797 return m_mgr->get_or_create_unknown_svalue (size_type_nodeglobal_trees[TI_SIZE_TYPE]);
2798
2799 tree cst = str_reg->get_string_cst ();
2800 tree out = build_int_cst (size_type_nodeglobal_trees[TI_SIZE_TYPE], TREE_STRING_LENGTH (cst)((tree_check ((cst), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2800, __FUNCTION__, (STRING_CST)))->string.length)
);
2801 return m_mgr->get_or_create_constant_svalue (out);
2802}
2803
2804/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2805 using DIR to determine if this access is a read or write. */
2806
2807void
2808region_model::check_region_access (const region *reg,
2809 enum access_direction dir,
2810 region_model_context *ctxt) const
2811{
2812 /* Fail gracefully if CTXT is NULL. */
2813 if (!ctxt)
2814 return;
2815
/* These checks apply to both reads and writes.  */
2816 check_region_for_taint (reg, dir, ctxt);
2817 check_region_bounds (reg, dir, ctxt);
2818
2819 switch (dir)
2820 {
2821 default:
2822 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2822, __FUNCTION__))
;
2823 case DIR_READ:
2824 /* Currently a no-op. */
2825 break;
2826 case DIR_WRITE:
2827 check_for_writable_region (reg, ctxt);
2828 break;
2829 }
2830}
2831
2832/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2833
2834void
2835region_model::check_region_for_write (const region *dest_reg,
2836 region_model_context *ctxt) const
2837{
/* Delegate to the common read/write checker with DIR_WRITE.  */
2838 check_region_access (dest_reg, DIR_WRITE, ctxt);
2839}
2840
2841/* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2842
2843void
2844region_model::check_region_for_read (const region *src_reg,
2845 region_model_context *ctxt) const
2846{
/* Delegate to the common read/write checker with DIR_READ.  */
2847 check_region_access (src_reg, DIR_READ, ctxt);
2848}
2849
2850/* Concrete subclass for casts of pointers that lead to trailing bytes. */
2851
2852class dubious_allocation_size
2853: public pending_diagnostic_subclass<dubious_allocation_size>
2854{
2855public:
/* Variant for when no expression for the allocated size is available.  */
2856 dubious_allocation_size (const region *lhs, const region *rhs)
2857 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE(tree) nullptr),
2858 m_has_allocation_event (false)
2859 {}
2860
/* Variant recording EXPR, the expression for the allocated size.  */
2861 dubious_allocation_size (const region *lhs, const region *rhs,
2862 tree expr)
2863 : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
2864 m_has_allocation_event (false)
2865 {}
2866
2867 const char *get_kind () const final override
2868 {
2869 return "dubious_allocation_size";
2870 }
2871
2872 bool operator== (const dubious_allocation_size &other) const
2873 {
2874 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
2875 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
2876 }
2877
2878 int get_controlling_option () const final override
2879 {
2880 return OPT_Wanalyzer_allocation_size;
2881 }
2882
2883 bool emit (rich_location *rich_loc) final override
2884 {
2885 diagnostic_metadata m;
/* CWE-131: Incorrect Calculation of Buffer Size.  */
2886 m.add_cwe (131);
2887
2888 return warning_meta (rich_loc, m, get_controlling_option (),
2889 "allocated buffer size is not a multiple"
2890 " of the pointee's size");
2891 }
2892
2893 label_text describe_final_event (const evdesc::final_event &ev) final
2894 override
2895 {
2896 tree pointee_type = TREE_TYPE (m_lhs->get_type ())((contains_struct_check ((m_lhs->get_type ()), (TS_TYPED),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2896, __FUNCTION__))->typed.type)
;
2897 if (m_has_allocation_event)
2898 return ev.formatted_print ("assigned to %qT here;"
2899 " %<sizeof (%T)%> is %qE",
2900 m_lhs->get_type (), pointee_type,
2901 size_in_bytes (pointee_type));
2902 /* Fallback: Typically, we should always see an allocation_event
2903 before. */
2904 if (m_expr)
2905 {
2906 if (TREE_CODE (m_expr)((enum tree_code) (m_expr)->base.code) == INTEGER_CST)
2907 return ev.formatted_print ("allocated %E bytes and assigned to"
2908 " %qT here; %<sizeof (%T)%> is %qE",
2909 m_expr, m_lhs->get_type (), pointee_type,
2910 size_in_bytes (pointee_type));
2911 else
2912 return ev.formatted_print ("allocated %qE bytes and assigned to"
2913 " %qT here; %<sizeof (%T)%> is %qE",
2914 m_expr, m_lhs->get_type (), pointee_type,
2915 size_in_bytes (pointee_type));
2916 }
2917
2918 return ev.formatted_print ("allocated and assigned to %qT here;"
2919 " %<sizeof (%T)%> is %qE",
2920 m_lhs->get_type (), pointee_type,
2921 size_in_bytes (pointee_type));
2922 }
2923
2924 void
2925 add_region_creation_events (const region *,
2926 tree capacity,
2927 const event_loc_info &loc_info,
2928 checker_path &emission_path) final override
2929 {
2930 emission_path.add_event
2931 (make_unique<region_creation_event_allocation_size> (capacity, loc_info));
2932
/* Remember that an allocation event exists, so describe_final_event can
   use its shorter wording.  */
2933 m_has_allocation_event = true;
2934 }
2935
2936 void mark_interesting_stuff (interesting_t *interest) final override
2937 {
2938 interest->add_region_creation (m_rhs);
2939 }
2940
2941private:
/* The pointer-typed lhs region and the allocated rhs region.  */
2942 const region *m_lhs;
2943 const region *m_rhs;
/* Expression for the allocated size, or NULL_TREE if unavailable.  */
2944 const tree m_expr;
2945 bool m_has_allocation_event;
2946};
2947
2948/* Return true on dubious allocation sizes for constant sizes. */
2949
2950static bool
2951capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2952 bool is_struct)
2953{
2954 gcc_assert (TREE_CODE (cst) == INTEGER_CST)((void)(!(((enum tree_code) (cst)->base.code) == INTEGER_CST
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2954, __FUNCTION__), 0 : 0))
;
2955 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST)((void)(!(((enum tree_code) (pointee_size_tree)->base.code
) == INTEGER_CST) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2955, __FUNCTION__), 0 : 0))
;
2956
2957 unsigned HOST_WIDE_INTlong pointee_size = TREE_INT_CST_LOW (pointee_size_tree)((unsigned long) (*tree_int_cst_elt_check ((pointee_size_tree
), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2957, __FUNCTION__)))
;
2958 unsigned HOST_WIDE_INTlong alloc_size = TREE_INT_CST_LOW (cst)((unsigned long) (*tree_int_cst_elt_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 2958, __FUNCTION__)))
;
2959
/* For structs, accept zero-sized allocations or anything at least one
   object big; otherwise require an exact multiple of the pointee size.
   (Callers filter out a zero pointee size before calling, avoiding a
   division by zero here.)  */
2960 if (is_struct)
2961 return alloc_size == 0 || alloc_size >= pointee_size;
2962 return alloc_size % pointee_size == 0;
2963}
2964
/* Convenience overload for the non-struct case.  */
2965static bool
2966capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2967{
2968 return capacity_compatible_with_type (cst, pointee_size_tree, false);
2969}
2970
2971/* Checks whether SVAL could be a multiple of SIZE_CST.
2972
2973 It works by visiting all svalues inside SVAL until it reaches
2974 atomic nodes. From those, it goes back up again and adds each
2975 node that might be a multiple of SIZE_CST to the RESULT_SET. */
2976
2977class size_visitor : public visitor
2978{
2979public:
/* Construct the visitor and immediately traverse ROOT_SVAL, populating
   result_set.  */
2980 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
2981 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
2982 {
2983 m_root_sval->accept (this);
2984 }
2985
/* True if the traversal concluded the root svalue might be a multiple
   of m_size_cst.  */
2986 bool get_result ()
2987 {
2988 return result_set.contains (m_root_sval);
2989 }
2990
2991 void visit_constant_svalue (const constant_svalue *sval) final override
2992 {
2993 check_constant (sval->get_constant (), sval);
2994 }
2995
/* Unknown values might be a multiple of anything.  */
2996 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
2997 final override
2998 {
2999 result_set.add (sval);
3000 }
3001
3002 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
3003 final override
3004 {
3005 result_set.add (sval);
3006 }
3007
3008 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3009 {
3010 const svalue *arg = sval->get_arg ();
3011 if (result_set.contains (arg))
3012 result_set.add (sval);
3013 }
3014
/* A product is a multiple if either factor is; for other binary ops,
   require both operands to be compatible.  */
3015 void visit_binop_svalue (const binop_svalue *sval) final override
3016 {
3017 const svalue *arg0 = sval->get_arg0 ();
3018 const svalue *arg1 = sval->get_arg1 ();
3019
3020 if (sval->get_op () == MULT_EXPR)
3021 {
3022 if (result_set.contains (arg0) || result_set.contains (arg1))
3023 result_set.add (sval);
3024 }
3025 else
3026 {
3027 if (result_set.contains (arg0) && result_set.contains (arg1))
3028 result_set.add (sval);
3029 }
3030 }
3031
3032 void visit_repeated_svalue (const repeated_svalue *sval) final override
3033 {
3034 sval->get_inner_svalue ()->accept (this);
3035 if (result_set.contains (sval->get_inner_svalue ()))
3036 result_set.add (sval);
3037 }
3038
3039 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3040 {
3041 sval->get_arg ()->accept (this);
3042 if (result_set.contains (sval->get_arg ()))
3043 result_set.add (sval);
3044 }
3045
3046 void visit_widening_svalue (const widening_svalue *sval) final override
3047 {
3048 const svalue *base = sval->get_base_svalue ();
3049 const svalue *iter = sval->get_iter_svalue ();
3050
3051 if (result_set.contains (base) && result_set.contains (iter))
3052 result_set.add (sval);
3053 }
3054
/* For conjured values, consult the constraint manager: an equivalent
   constant can be checked directly; otherwise assume compatibility.  */
3055 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
3056 final override
3057 {
3058 equiv_class_id id (-1);
3059 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3060 {
3061 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3062 check_constant (cst, sval);
3063 else
3064 result_set.add (sval);
3065 }
3066 }
3067
3068 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
3069 final override
3070 {
3071 result_set.add (sval);
3072 }
3073
3074 void visit_const_fn_result_svalue (const const_fn_result_svalue
3075 *sval ATTRIBUTE_UNUSED__attribute__ ((__unused__))) final override
3076 {
3077 result_set.add (sval);
3078 }
3079
3080private:
/* Add SVAL to result_set if the constant CST is (or may be) compatible
   with m_size_cst.  */
3081 void check_constant (tree cst, const svalue *sval)
3082 {
3083 switch (TREE_CODE (cst)((enum tree_code) (cst)->base.code))
3084 {
3085 default:
3086 /* Assume all unhandled operands are compatible. */
3087 result_set.add (sval);
3088 break;
3089 case INTEGER_CST:
3090 if (capacity_compatible_with_type (cst, m_size_cst))
3091 result_set.add (sval);
3092 break;
3093 }
3094 }
3095
3096 tree m_size_cst;
3097 const svalue *m_root_sval;
3098 constraint_manager *m_cm;
3099 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3100};
3101
3102/* Return true if a struct or union either uses the inheritance pattern,
3103 where the first field is a base struct, or the flexible array member
3104 pattern, where the last field is an array without a specified size. */
3105
3106static bool
3107struct_or_union_with_inheritance_p (tree struc)
3108{
3109 tree iter = TYPE_FIELDS (struc)((tree_check3 ((struc), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3109, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
;
3110 if (iter == NULL_TREE(tree) nullptr)
3111 return false;
/* Inheritance pattern: first field is itself a struct/union.  */
3112 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter))(((enum tree_code) (((contains_struct_check ((iter), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3112, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((iter), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3112, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((iter), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3112, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
)
)
3113 return true;
3114
/* Walk to the last field in the chain (the chain is non-empty here).  */
3115 tree last_field;
3116 while (iter != NULL_TREE(tree) nullptr)
3117 {
3118 last_field = iter;
3119 iter = DECL_CHAIN (iter)(((contains_struct_check (((contains_struct_check ((iter), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3119, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3119, __FUNCTION__))->common.chain))
;
3120 }
3121
/* Flexible-array-member pattern: last field has array type.  */
3122 if (last_field != NULL_TREE(tree) nullptr
3123 && TREE_CODE (TREE_TYPE (last_field))((enum tree_code) (((contains_struct_check ((last_field), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3123, __FUNCTION__))->typed.type))->base.code)
== ARRAY_TYPE)
3124 return true;
3125
3126 return false;
3127}
3128
3129/* Return true if the lhs and rhs of an assignment have different types. */
3130
3131static bool
3132is_any_cast_p (const gimple *stmt)
3133{
/* Assignment: a cast if gimple marks it as one, or if the lhs and rhs
   types differ.  */
3134 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3135 return gimple_assign_cast_p (assign)
3136 || !pending_diagnostic::same_tree_p (
3137 TREE_TYPE (gimple_assign_lhs (assign))((contains_struct_check ((gimple_assign_lhs (assign)), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3137, __FUNCTION__))->typed.type)
,
3138 TREE_TYPE (gimple_assign_rhs1 (assign))((contains_struct_check ((gimple_assign_rhs1 (assign)), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3138, __FUNCTION__))->typed.type)
);
/* Call: a cast if there is an lhs whose type differs from the call's
   return type.  */
3139 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3140 {
3141 tree lhs = gimple_call_lhs (call);
3142 return lhs != NULL_TREE(tree) nullptr && !pending_diagnostic::same_tree_p (
3143 TREE_TYPE (gimple_call_lhs (call))((contains_struct_check ((gimple_call_lhs (call)), (TS_TYPED)
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3143, __FUNCTION__))->typed.type)
,
3144 gimple_call_return_type (call));
3145 }
3146
3147 return false;
3148}
3149
3150/* On pointer assignments, check whether the buffer size of
3151 RHS_SVAL is compatible with the type of the LHS_REG.
3152 Use a non-null CTXT to report allocation size warnings. */
3153
3154void
3155region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
3156 region_model_context *ctxt) const
3157{
3158 if (!ctxt || ctxt->get_stmt () == NULLnullptr)
3159 return;
3160 /* Only report warnings on assignments that actually change the type. */
3161 if (!is_any_cast_p (ctxt->get_stmt ()))
3162 return;
3163
3164 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
3165 if (!reg_sval)
3166 return;
3167
3168 tree pointer_type = lhs_reg->get_type ();
3169 if (pointer_type == NULL_TREE(tree) nullptr || !POINTER_TYPE_P (pointer_type)(((enum tree_code) (pointer_type)->base.code) == POINTER_TYPE
|| ((enum tree_code) (pointer_type)->base.code) == REFERENCE_TYPE
)
)
3170 return;
3171
3172 tree pointee_type = TREE_TYPE (pointer_type)((contains_struct_check ((pointer_type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3172, __FUNCTION__))->typed.type)
;
3173 /* Make sure that the type on the left-hand size actually has a size. */
3174 if (pointee_type == NULL_TREE(tree) nullptr || VOID_TYPE_P (pointee_type)(((enum tree_code) (pointee_type)->base.code) == VOID_TYPE
)
3175 || TYPE_SIZE_UNIT (pointee_type)((tree_class_check ((pointee_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3175, __FUNCTION__))->type_common.size_unit)
== NULL_TREE(tree) nullptr)
3176 return;
3177
3178 /* Bail out early on pointers to structs where we can
3179 not deduce whether the buffer size is compatible. */
3180 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type)(((enum tree_code) (pointee_type)->base.code) == RECORD_TYPE
|| ((enum tree_code) (pointee_type)->base.code) == UNION_TYPE
|| ((enum tree_code) (pointee_type)->base.code) == QUAL_UNION_TYPE
)
;
3181 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
3182 return;
3183
3184 tree pointee_size_tree = size_in_bytes (pointee_type);
3185 /* We give up if the type size is not known at compile-time or the
3186 type size is always compatible regardless of the buffer size. */
3187 if (TREE_CODE (pointee_size_tree)((enum tree_code) (pointee_size_tree)->base.code) != INTEGER_CST
3188 || integer_zerop (pointee_size_tree)
3189 || integer_onep (pointee_size_tree))
3190 return;
3191
3192 const region *rhs_reg = reg_sval->get_pointee ();
3193 const svalue *capacity = get_capacity (rhs_reg);
3194 switch (capacity->get_kind ())
3195 {
3196 case svalue_kind::SK_CONSTANT:
3197 {
3198 const constant_svalue *cst_cap_sval
3199 = as_a <const constant_svalue *> (capacity);
3200 tree cst_cap = cst_cap_sval->get_constant ();
3201 if (TREE_CODE (cst_cap)((enum tree_code) (cst_cap)->base.code) == INTEGER_CST
3202 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
3203 is_struct))
3204 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
3205 cst_cap));
3206 }
3207 break;
3208 default:
3209 {
3210 if (!is_struct)
3211 {
3212 size_visitor v (pointee_size_tree, capacity, m_constraints);
3213 if (!v.get_result ())
3214 {
3215 tree expr = get_representative_tree (capacity);
3216 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
3217 rhs_reg,
3218 expr));
3219 }
3220 }
3221 break;
3222 }
3223 }
3224}
3225
3226/* Set the value of the region given by LHS_REG to the value given
3227 by RHS_SVAL.
3228 Use CTXT to report any warnings associated with writing to LHS_REG. */
3229
3230void
3231region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3232 region_model_context *ctxt)
3233{
3234 gcc_assert (lhs_reg)((void)(!(lhs_reg) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3234, __FUNCTION__), 0 : 0))
;
3235 gcc_assert (rhs_sval)((void)(!(rhs_sval) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3235, __FUNCTION__), 0 : 0))
;
3236
3237 /* Setting the value of an empty region is a no-op. */
3238 if (lhs_reg->empty_p ())
3239 return;
3240
3241 check_region_size (lhs_reg, rhs_sval, ctxt);
3242
3243 check_region_for_write (lhs_reg, ctxt);
3244
3245 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
3246 ctxt ? ctxt->get_uncertainty () : NULLnullptr);
3247}
3248
3249/* Set the value of the region given by LHS to the value given by RHS. */
3250
3251void
3252region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
3253{
3254 const region *lhs_reg = get_lvalue (lhs, ctxt);
3255 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3256 gcc_assert (lhs_reg)((void)(!(lhs_reg) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3256, __FUNCTION__), 0 : 0))
;
3257 gcc_assert (rhs_sval)((void)(!(rhs_sval) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3257, __FUNCTION__), 0 : 0))
;
3258 set_value (lhs_reg, rhs_sval, ctxt);
3259}
3260
3261/* Remove all bindings overlapping REG within the store. */
3262
3263void
3264region_model::clobber_region (const region *reg)
3265{
3266 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3267}
3268
3269/* Remove any bindings for REG within the store. */
3270
3271void
3272region_model::purge_region (const region *reg)
3273{
3274 m_store.purge_region (m_mgr->get_store_manager(), reg);
3275}
3276
3277/* Fill REG with SVAL. */
3278
3279void
3280region_model::fill_region (const region *reg, const svalue *sval)
3281{
3282 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3283}
3284
3285/* Zero-fill REG. */
3286
3287void
3288region_model::zero_fill_region (const region *reg)
3289{
3290 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3291}
3292
3293/* Mark REG as having unknown content. */
3294
3295void
3296region_model::mark_region_as_unknown (const region *reg,
3297 uncertainty_t *uncertainty)
3298{
3299 svalue_set maybe_live_values;
3300 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
3301 uncertainty, &maybe_live_values);
3302 m_store.on_maybe_live_values (maybe_live_values);
3303}
3304
3305/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
3306 this model. */
3307
3308tristate
3309region_model::eval_condition (const svalue *lhs,
3310 enum tree_code op,
3311 const svalue *rhs) const
3312{
3313 gcc_assert (lhs)((void)(!(lhs) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3313, __FUNCTION__), 0 : 0))
;
3314 gcc_assert (rhs)((void)(!(rhs) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3314, __FUNCTION__), 0 : 0))
;
3315
3316 /* For now, make no attempt to capture constraints on floating-point
3317 values. */
3318 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ())((((enum tree_code) (lhs->get_type ())->base.code) == REAL_TYPE
) || ((((enum tree_code) (lhs->get_type ())->base.code)
== COMPLEX_TYPE || (((enum tree_code) (lhs->get_type ())->
base.code) == VECTOR_TYPE)) && (((enum tree_code) (((
contains_struct_check ((lhs->get_type ()), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3318, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
)
3319 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())((((enum tree_code) (rhs->get_type ())->base.code) == REAL_TYPE
) || ((((enum tree_code) (rhs->get_type ())->base.code)
== COMPLEX_TYPE || (((enum tree_code) (rhs->get_type ())->
base.code) == VECTOR_TYPE)) && (((enum tree_code) (((
contains_struct_check ((rhs->get_type ()), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3319, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
))
3320 return tristate::unknown ();
3321
3322 /* See what we know based on the values. */
3323
3324 /* Unwrap any unmergeable values. */
3325 lhs = lhs->unwrap_any_unmergeable ();
3326 rhs = rhs->unwrap_any_unmergeable ();
3327
3328 if (lhs == rhs)
3329 {
3330 /* If we have the same svalue, then we have equality
3331 (apart from NaN-handling).
3332 TODO: should this definitely be the case for poisoned values? */
3333 /* Poisoned and unknown values are "unknowable". */
3334 if (lhs->get_kind () == SK_POISONED
3335 || lhs->get_kind () == SK_UNKNOWN)
3336 return tristate::TS_UNKNOWN;
3337
3338 switch (op)
3339 {
3340 case EQ_EXPR:
3341 case GE_EXPR:
3342 case LE_EXPR:
3343 return tristate::TS_TRUE;
3344
3345 case NE_EXPR:
3346 case GT_EXPR:
3347 case LT_EXPR:
3348 return tristate::TS_FALSE;
3349
3350 default:
3351 /* For other ops, use the logic below. */
3352 break;
3353 }
3354 }
3355
3356 /* If we have a pair of region_svalues, compare them. */
3357 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3358 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3359 {
3360 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3361 if (res.is_known ())
3362 return res;
3363 /* Otherwise, only known through constraints. */
3364 }
3365
3366 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
3367 {
3368 /* If we have a pair of constants, compare them. */
3369 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3370 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3371 else
3372 {
3373 /* When we have one constant, put it on the RHS. */
3374 std::swap (lhs, rhs);
3375 op = swap_tree_comparison (op);
3376 }
3377 }
3378 gcc_assert (lhs->get_kind () != SK_CONSTANT)((void)(!(lhs->get_kind () != SK_CONSTANT) ? fancy_abort (
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3378, __FUNCTION__), 0 : 0))
;
3379
3380 /* Handle comparison against zero. */
3381 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3382 if (zerop (cst_rhs->get_constant ()))
3383 {
3384 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3385 {
3386 /* A region_svalue is a non-NULL pointer, except in certain
3387 special cases (see the comment for region::non_null_p). */
3388 const region *pointee = ptr->get_pointee ();
3389 if (pointee->non_null_p ())
3390 {
3391 switch (op)
3392 {
3393 default:
3394 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3394, __FUNCTION__))
;
3395
3396 case EQ_EXPR:
3397 case GE_EXPR:
3398 case LE_EXPR:
3399 return tristate::TS_FALSE;
3400
3401 case NE_EXPR:
3402 case GT_EXPR:
3403 case LT_EXPR:
3404 return tristate::TS_TRUE;
3405 }
3406 }
3407 }
3408 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3409 {
3410 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3411 isn't strictly true, in that eventually ptr++ will wrap
3412 around and be NULL, but it won't occur in practise and thus
3413 can be used to suppress effectively false positives that we
3414 shouldn't warn for. */
3415 if (binop->get_op () == POINTER_PLUS_EXPR)
3416 {
3417 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
3418 if (lhs_ts.is_known ())
3419 return lhs_ts;
3420 }
3421 }
3422 else if (const unaryop_svalue *unaryop
3423 = lhs->dyn_cast_unaryop_svalue ())
3424 {
3425 if (unaryop->get_op () == NEGATE_EXPR)
3426 {
3427 /* e.g. "-X <= 0" is equivalent to X >= 0". */
3428 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3429 swap_tree_comparison (op),
3430 rhs);
3431 if (lhs_ts.is_known ())
3432 return lhs_ts;
3433 }
3434 }
3435 }
3436
3437 /* Handle rejection of equality for comparisons of the initial values of
3438 "external" values (such as params) with the address of locals. */
3439 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3440 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3441 {
3442 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3443 if (res.is_known ())
3444 return res;
3445 }
3446 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3447 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3448 {
3449 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3450 if (res.is_known ())
3451 return res;
3452 }
3453
3454 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3455 if (tree rhs_cst = rhs->maybe_get_constant ())
3456 {
3457 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3458 if (res.is_known ())
3459 return res;
3460 }
3461
3462 /* Handle comparisons between two svalues with more than one operand. */
3463 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3464 {
3465 switch (op)
3466 {
3467 default:
3468 break;
3469 case EQ_EXPR:
3470 {
3471 /* TODO: binops can be equal even if they are not structurally
3472 equal in case of commutative operators. */
3473 tristate res = structural_equality (lhs, rhs);
3474 if (res.is_true ())
3475 return res;
3476 }
3477 break;
3478 case LE_EXPR:
3479 {
3480 tristate res = structural_equality (lhs, rhs);
3481 if (res.is_true ())
3482 return res;
3483 }
3484 break;
3485 case GE_EXPR:
3486 {
3487 tristate res = structural_equality (lhs, rhs);
3488 if (res.is_true ())
3489 return res;
3490 res = symbolic_greater_than (binop, rhs);
3491 if (res.is_true ())
3492 return res;
3493 }
3494 break;
3495 case GT_EXPR:
3496 {
3497 tristate res = symbolic_greater_than (binop, rhs);
3498 if (res.is_true ())
3499 return res;
3500 }
3501 break;
3502 }
3503 }
3504
3505 /* Otherwise, try constraints.
3506 Cast to const to ensure we don't change the constraint_manager as we
3507 do this (e.g. by creating equivalence classes). */
3508 const constraint_manager *constraints = m_constraints;
3509 return constraints->eval_condition (lhs, op, rhs);
3510}
3511
3512/* Subroutine of region_model::eval_condition, for rejecting
3513 equality of INIT_VAL(PARM) with &LOCAL. */
3514
3515tristate
3516region_model::compare_initial_and_pointer (const initial_svalue *init,
3517 const region_svalue *ptr) const
3518{
3519 const region *pointee = ptr->get_pointee ();
3520
3521 /* If we have a pointer to something within a stack frame, it can't be the
3522 initial value of a param. */
3523 if (pointee->maybe_get_frame_region ())
3524 if (init->initial_value_of_param_p ())
3525 return tristate::TS_FALSE;
3526
3527 return tristate::TS_UNKNOWN;
3528}
3529
3530/* Return true if SVAL is definitely positive. */
3531
3532static bool
3533is_positive_svalue (const svalue *sval)
3534{
3535 if (tree cst = sval->maybe_get_constant ())
3536 return !zerop (cst) && get_range_pos_neg (cst) == 1;
3537 tree type = sval->get_type ();
3538 if (!type)
3539 return false;
3540 /* Consider a binary operation size_t + int. The analyzer wraps the int in
3541 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
3542 the result is smaller than the first operand. Thus, we have to look if
3543 the argument of the unaryop_svalue is also positive. */
3544 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
3545 return CONVERT_EXPR_CODE_P (un_op->get_op ())((un_op->get_op ()) == NOP_EXPR || (un_op->get_op ()) ==
CONVERT_EXPR)
&& TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3545, __FUNCTION__))->base.u.bits.unsigned_flag)
3546 && is_positive_svalue (un_op->get_arg ());
3547 return TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3547, __FUNCTION__))->base.u.bits.unsigned_flag)
;
3548}
3549
3550/* Return true if A is definitely larger than B.
3551
3552 Limitation: does not account for integer overflows and does not try to
3553 return false, so it can not be used negated. */
3554
3555tristate
3556region_model::symbolic_greater_than (const binop_svalue *bin_a,
3557 const svalue *b) const
3558{
3559 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
3560 {
3561 /* Eliminate the right-hand side of both svalues. */
3562 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3563 if (bin_a->get_op () == bin_b->get_op ()
3564 && eval_condition (bin_a->get_arg1 (),
3565 GT_EXPR,
3566 bin_b->get_arg1 ()).is_true ()
3567 && eval_condition (bin_a->get_arg0 (),
3568 GE_EXPR,
3569 bin_b->get_arg0 ()).is_true ())
3570 return tristate (tristate::TS_TRUE);
3571
3572 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
3573 if (is_positive_svalue (bin_a->get_arg1 ())
3574 && eval_condition (bin_a->get_arg0 (),
3575 GE_EXPR, b).is_true ())
3576 return tristate (tristate::TS_TRUE);
3577 }
3578 return tristate::unknown ();
3579}
3580
3581/* Return true if A and B are equal structurally.
3582
3583 Structural equality means that A and B are equal if the svalues A and B have
3584 the same nodes at the same positions in the tree and the leafs are equal.
3585 Equality for conjured_svalues and initial_svalues is determined by comparing
3586 the pointers while constants are compared by value. That behavior is useful
3587 to check for binaryop_svlaues that evaluate to the same concrete value but
3588 might use one operand with a different type but the same constant value.
3589
3590 For example,
3591 binop_svalue (mult_expr,
3592 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3593 constant_svalue (‘size_t’, 4))
3594 and
3595 binop_svalue (mult_expr,
3596 initial_svalue (‘size_t’, decl_region (..., 'some_var'),
3597 constant_svalue (‘sizetype’, 4))
3598 are structurally equal. A concrete C code example, where this occurs, can
3599 be found in test7 of out-of-bounds-5.c. */
3600
3601tristate
3602region_model::structural_equality (const svalue *a, const svalue *b) const
3603{
3604 /* If A and B are referentially equal, they are also structurally equal. */
3605 if (a == b)
3606 return tristate (tristate::TS_TRUE);
3607
3608 switch (a->get_kind ())
3609 {
3610 default:
3611 return tristate::unknown ();
3612 /* SK_CONJURED and SK_INITIAL are already handled
3613 by the referential equality above. */
3614 case SK_CONSTANT:
3615 {
3616 tree a_cst = a->maybe_get_constant ();
3617 tree b_cst = b->maybe_get_constant ();
3618 if (a_cst && b_cst)
3619 return tristate (tree_int_cst_equal (a_cst, b_cst));
3620 }
3621 return tristate (tristate::TS_FALSE);
3622 case SK_UNARYOP:
3623 {
3624 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
3625 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
3626 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
3627 un_b->get_type ())
3628 && un_a->get_op () == un_b->get_op ()
3629 && structural_equality (un_a->get_arg (),
3630 un_b->get_arg ()));
3631 }
3632 return tristate (tristate::TS_FALSE);
3633 case SK_BINOP:
3634 {
3635 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
3636 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3637 return tristate (bin_a->get_op () == bin_b->get_op ()
3638 && structural_equality (bin_a->get_arg0 (),
3639 bin_b->get_arg0 ())
3640 && structural_equality (bin_a->get_arg1 (),
3641 bin_b->get_arg1 ()));
3642 }
3643 return tristate (tristate::TS_FALSE);
3644 }
3645}
3646
3647/* Handle various constraints of the form:
3648 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
3649 OP : == or !=
3650 RHS: zero
3651 and (with a cast):
3652 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3653 OP : == or !=
3654 RHS: zero
3655 by adding constraints for INNER_LHS INNEROP INNER_RHS.
3656
3657 Return true if this function can fully handle the constraint; if
3658 so, add the implied constraint(s) and write true to *OUT if they
3659 are consistent with existing constraints, or write false to *OUT
3660 if they contradicts existing constraints.
3661
3662 Return false for cases that this function doeesn't know how to handle.
3663
3664 For example, if we're checking a stored conditional, we'll have
3665 something like:
3666 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3667 OP : NE_EXPR
3668 RHS: zero
3669 which this function can turn into an add_constraint of:
3670 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3671
3672 Similarly, optimized && and || conditionals lead to e.g.
3673 if (p && q)
3674 becoming gimple like this:
3675 _1 = p_6 == 0B;
3676 _2 = q_8 == 0B
3677 _3 = _1 | _2
3678 On the "_3 is false" branch we can have constraints of the form:
3679 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3680 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3681 == 0
3682 which implies that both _1 and _2 are false,
3683 which this function can turn into a pair of add_constraints of
3684 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3685 and:
3686 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
3687
3688bool
3689region_model::add_constraints_from_binop (const svalue *outer_lhs,
3690 enum tree_code outer_op,
3691 const svalue *outer_rhs,
3692 bool *out,
3693 region_model_context *ctxt)
3694{
3695 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
3696 outer_lhs = cast;
3697 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
3698 if (!binop_sval)
3699 return false;
3700 if (!outer_rhs->all_zeroes_p ())
3701 return false;
3702
3703 const svalue *inner_lhs = binop_sval->get_arg0 ();
3704 enum tree_code inner_op = binop_sval->get_op ();
3705 const svalue *inner_rhs = binop_sval->get_arg1 ();
3706
3707 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3708 return false;
3709
3710 /* We have either
3711 - "OUTER_LHS != false" (i.e. OUTER is true), or
3712 - "OUTER_LHS == false" (i.e. OUTER is false). */
3713 bool is_true = outer_op == NE_EXPR;
3714
3715 switch (inner_op)
3716 {
3717 default:
3718 return false;
3719
3720 case EQ_EXPR:
3721 case NE_EXPR:
3722 {
3723 /* ...and "(inner_lhs OP inner_rhs) == 0"
3724 then (inner_lhs OP inner_rhs) must have the same
3725 logical value as LHS. */
3726 if (!is_true)
3727 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3728 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3729 return true;
3730 }
3731 break;
3732
3733 case BIT_AND_EXPR:
3734 if (is_true)
3735 {
3736 /* ...and "(inner_lhs & inner_rhs) != 0"
3737 then both inner_lhs and inner_rhs must be true. */
3738 const svalue *false_sval
3739 = m_mgr->get_or_create_constant_svalue (boolean_false_nodeglobal_trees[TI_BOOLEAN_FALSE]);
3740 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3741 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3742 *out = sat1 && sat2;
3743 return true;
3744 }
3745 return false;
3746
3747 case BIT_IOR_EXPR:
3748 if (!is_true)
3749 {
3750 /* ...and "(inner_lhs | inner_rhs) == 0"
3751 i.e. "(inner_lhs | inner_rhs)" is false
3752 then both inner_lhs and inner_rhs must be false. */
3753 const svalue *false_sval
3754 = m_mgr->get_or_create_constant_svalue (boolean_false_nodeglobal_trees[TI_BOOLEAN_FALSE]);
3755 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3756 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3757 *out = sat1 && sat2;
3758 return true;
3759 }
3760 return false;
3761 }
3762}
3763
3764/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3765 If it is consistent with existing constraints, add it, and return true.
3766 Return false if it contradicts existing constraints.
3767 Use CTXT for reporting any diagnostics associated with the accesses. */
3768
3769bool
3770region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3771 region_model_context *ctxt)
3772{
3773 /* For now, make no attempt to capture constraints on floating-point
3774 values. */
3775 if (FLOAT_TYPE_P (TREE_TYPE (lhs))((((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
) || ((((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == COMPLEX_TYPE
|| (((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == VECTOR_TYPE
)) && (((enum tree_code) (((contains_struct_check (((
(contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
|| FLOAT_TYPE_P (TREE_TYPE (rhs))((((enum tree_code) (((contains_struct_check ((rhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
) || ((((enum tree_code) (((contains_struct_check ((rhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == COMPLEX_TYPE
|| (((enum tree_code) (((contains_struct_check ((rhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == VECTOR_TYPE
)) && (((enum tree_code) (((contains_struct_check (((
(contains_struct_check ((rhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3775, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
)
3776 return true;
3777
3778 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3779 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3780
3781 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3782}
3783
3784/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3785 If it is consistent with existing constraints, add it, and return true.
3786 Return false if it contradicts existing constraints.
3787 Use CTXT for reporting any diagnostics associated with the accesses. */
3788
3789bool
3790region_model::add_constraint (const svalue *lhs,
3791 enum tree_code op,
3792 const svalue *rhs,
3793 region_model_context *ctxt)
3794{
3795 tristate t_cond = eval_condition (lhs, op, rhs);
3796
3797 /* If we already have the condition, do nothing. */
3798 if (t_cond.is_true ())
3799 return true;
3800
3801 /* Reject a constraint that would contradict existing knowledge, as
3802 unsatisfiable. */
3803 if (t_cond.is_false ())
3804 return false;
3805
3806 bool out;
3807 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3808 return out;
3809
3810 /* Attempt to store the constraint. */
3811 if (!m_constraints->add_constraint (lhs, op, rhs))
3812 return false;
3813
3814 /* Notify the context, if any. This exists so that the state machines
3815 in a program_state can be notified about the condition, and so can
3816 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3817 when synthesizing constraints as above. */
3818 if (ctxt)
3819 ctxt->on_condition (lhs, op, rhs);
3820
3821 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3822 the case where REGION is heap-allocated and thus could be NULL). */
3823 if (tree rhs_cst = rhs->maybe_get_constant ())
3824 if (op == EQ_EXPR && zerop (rhs_cst))
3825 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3826 unset_dynamic_extents (region_sval->get_pointee ());
3827
3828 return true;
3829}
3830
3831/* As above, but when returning false, if OUT is non-NULL, write a
3832 new rejected_constraint to *OUT. */
3833
3834bool
3835region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3836 region_model_context *ctxt,
3837 rejected_constraint **out)
3838{
3839 bool sat = add_constraint (lhs, op, rhs, ctxt);
3840 if (!sat && out)
3841 *out = new rejected_op_constraint (*this, lhs, op, rhs);
3842 return sat;
3843}
3844
3845/* Determine what is known about the condition "LHS OP RHS" within
3846 this model.
3847 Use CTXT for reporting any diagnostics associated with the accesses. */
3848
3849tristate
3850region_model::eval_condition (tree lhs,
3851 enum tree_code op,
3852 tree rhs,
3853 region_model_context *ctxt) const
3854{
3855 /* For now, make no attempt to model constraints on floating-point
3856 values. */
3857 if (FLOAT_TYPE_P (TREE_TYPE (lhs))((((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
) || ((((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == COMPLEX_TYPE
|| (((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == VECTOR_TYPE
)) && (((enum tree_code) (((contains_struct_check (((
(contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
|| FLOAT_TYPE_P (TREE_TYPE (rhs))((((enum tree_code) (((contains_struct_check ((rhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
) || ((((enum tree_code) (((contains_struct_check ((rhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == COMPLEX_TYPE
|| (((enum tree_code) (((contains_struct_check ((rhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == VECTOR_TYPE
)) && (((enum tree_code) (((contains_struct_check (((
(contains_struct_check ((rhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3857, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
)
3858 return tristate::unknown ();
3859
3860 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3861}
3862
3863/* Implementation of region_model::get_representative_path_var.
3864 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3865 Use VISITED to prevent infinite mutual recursion with the overload for
3866 regions. */
3867
3868path_var
3869region_model::get_representative_path_var_1 (const svalue *sval,
3870 svalue_set *visited) const
3871{
3872 gcc_assert (sval)((void)(!(sval) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3872, __FUNCTION__), 0 : 0))
;
3873
3874 /* Prevent infinite recursion. */
3875 if (visited->contains (sval))
3876 return path_var (NULL_TREE(tree) nullptr, 0);
3877 visited->add (sval);
3878
3879 /* Handle casts by recursion into get_representative_path_var. */
3880 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3881 {
3882 path_var result = get_representative_path_var (cast_sval, visited);
3883 tree orig_type = sval->get_type ();
3884 /* If necessary, wrap the result in a cast. */
3885 if (result.m_tree && orig_type)
3886 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3887 return result;
3888 }
3889
3890 auto_vec<path_var> pvs;
3891 m_store.get_representative_path_vars (this, visited, sval, &pvs);
3892
3893 if (tree cst = sval->maybe_get_constant ())
3894 pvs.safe_push (path_var (cst, 0));
3895
3896 /* Handle string literals and various other pointers. */
3897 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3898 {
3899 const region *reg = ptr_sval->get_pointee ();
3900 if (path_var pv = get_representative_path_var (reg, visited))
3901 return path_var (build1 (ADDR_EXPR,
3902 sval->get_type (),
3903 pv.m_tree),
3904 pv.m_stack_depth);
3905 }
3906
3907 /* If we have a sub_svalue, look for ways to represent the parent. */
3908 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
3909 {
3910 const svalue *parent_sval = sub_sval->get_parent ();
3911 const region *subreg = sub_sval->get_subregion ();
3912 if (path_var parent_pv
3913 = get_representative_path_var (parent_sval, visited))
3914 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3915 return path_var (build3 (COMPONENT_REF,
3916 sval->get_type (),
3917 parent_pv.m_tree,
3918 field_reg->get_field (),
3919 NULL_TREE(tree) nullptr),
3920 parent_pv.m_stack_depth);
3921 }
3922
3923 /* Handle binops. */
3924 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3925 if (path_var lhs_pv
3926 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3927 if (path_var rhs_pv
3928 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3929 return path_var (build2 (binop_sval->get_op (),
3930 sval->get_type (),
3931 lhs_pv.m_tree, rhs_pv.m_tree),
3932 lhs_pv.m_stack_depth);
3933
3934 if (pvs.length () < 1)
3935 return path_var (NULL_TREE(tree) nullptr, 0);
3936
3937 pvs.qsort (readability_comparator)qsort (readability_comparator);
3938 return pvs[0];
3939}
3940
3941/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3942 Use VISITED to prevent infinite mutual recursion with the overload for
3943 regions
3944
3945 This function defers to get_representative_path_var_1 to do the work;
3946 it adds verification that get_representative_path_var_1 returned a tree
3947 of the correct type. */
3948
3949path_var
3950region_model::get_representative_path_var (const svalue *sval,
3951 svalue_set *visited) const
3952{
3953 if (sval == NULLnullptr)
3954 return path_var (NULL_TREE(tree) nullptr, 0);
3955
3956 tree orig_type = sval->get_type ();
3957
3958 path_var result = get_representative_path_var_1 (sval, visited);
3959
3960 /* Verify that the result has the same type as SVAL, if any. */
3961 if (result.m_tree && orig_type)
3962 gcc_assert (TREE_TYPE (result.m_tree) == orig_type)((void)(!(((contains_struct_check ((result.m_tree), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3962, __FUNCTION__))->typed.type) == orig_type) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3962, __FUNCTION__), 0 : 0))
;
3963
3964 return result;
3965}
3966
3967/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3968
3969 Strip off any top-level cast, to avoid messages like
3970 double-free of '(void *)ptr'
3971 from analyzer diagnostics. */
3972
3973tree
3974region_model::get_representative_tree (const svalue *sval) const
3975{
3976 svalue_set visited;
3977 tree expr = get_representative_path_var (sval, &visited).m_tree;
3978
3979 /* Strip off any top-level cast. */
3980 if (expr && TREE_CODE (expr)((enum tree_code) (expr)->base.code) == NOP_EXPR)
3981 expr = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3981, __FUNCTION__)))))
;
3982
3983 return fixup_tree_for_diagnostic (expr);
3984}
3985
3986tree
3987region_model::get_representative_tree (const region *reg) const
3988{
3989 svalue_set visited;
3990 tree expr = get_representative_path_var (reg, &visited).m_tree;
3991
3992 /* Strip off any top-level cast. */
3993 if (expr && TREE_CODE (expr)((enum tree_code) (expr)->base.code) == NOP_EXPR)
3994 expr = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 3994, __FUNCTION__)))))
;
3995
3996 return fixup_tree_for_diagnostic (expr);
3997}
3998
3999/* Implementation of region_model::get_representative_path_var.
4000
4001 Attempt to return a path_var that represents REG, or return
4002 the NULL path_var.
4003 For example, a region for a field of a local would be a path_var
4004 wrapping a COMPONENT_REF.
4005 Use VISITED to prevent infinite mutual recursion with the overload for
4006 svalues. */
4007
4008path_var
4009region_model::get_representative_path_var_1 (const region *reg,
4010 svalue_set *visited) const
4011{
4012 switch (reg->get_kind ())
4013 {
4014 default:
4015 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4015, __FUNCTION__))
;
4016
4017 case RK_FRAME:
4018 case RK_GLOBALS:
4019 case RK_CODE:
4020 case RK_HEAP:
4021 case RK_STACK:
4022 case RK_THREAD_LOCAL:
4023 case RK_ROOT:
4024 /* Regions that represent memory spaces are not expressible as trees. */
4025 return path_var (NULL_TREE(tree) nullptr, 0);
4026
4027 case RK_FUNCTION:
4028 {
4029 const function_region *function_reg
4030 = as_a <const function_region *> (reg);
4031 return path_var (function_reg->get_fndecl (), 0);
4032 }
4033 case RK_LABEL:
4034 {
4035 const label_region *label_reg = as_a <const label_region *> (reg);
4036 return path_var (label_reg->get_label (), 0);
4037 }
4038
4039 case RK_SYMBOLIC:
4040 {
4041 const symbolic_region *symbolic_reg
4042 = as_a <const symbolic_region *> (reg);
4043 const svalue *pointer = symbolic_reg->get_pointer ();
4044 path_var pointer_pv = get_representative_path_var (pointer, visited);
4045 if (!pointer_pv)
4046 return path_var (NULL_TREE(tree) nullptr, 0);
4047 tree offset = build_int_cst (pointer->get_type (), 0);
4048 return path_var (build2 (MEM_REF,
4049 reg->get_type (),
4050 pointer_pv.m_tree,
4051 offset),
4052 pointer_pv.m_stack_depth);
4053 }
4054 case RK_DECL:
4055 {
4056 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4057 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4058 }
4059 case RK_FIELD:
4060 {
4061 const field_region *field_reg = as_a <const field_region *> (reg);
4062 path_var parent_pv
4063 = get_representative_path_var (reg->get_parent_region (), visited);
4064 if (!parent_pv)
4065 return path_var (NULL_TREE(tree) nullptr, 0);
4066 return path_var (build3 (COMPONENT_REF,
4067 reg->get_type (),
4068 parent_pv.m_tree,
4069 field_reg->get_field (),
4070 NULL_TREE(tree) nullptr),
4071 parent_pv.m_stack_depth);
4072 }
4073
4074 case RK_ELEMENT:
4075 {
4076 const element_region *element_reg
4077 = as_a <const element_region *> (reg);
4078 path_var parent_pv
4079 = get_representative_path_var (reg->get_parent_region (), visited);
4080 if (!parent_pv)
4081 return path_var (NULL_TREE(tree) nullptr, 0);
4082 path_var index_pv
4083 = get_representative_path_var (element_reg->get_index (), visited);
4084 if (!index_pv)
4085 return path_var (NULL_TREE(tree) nullptr, 0);
4086 return path_var (build4 (ARRAY_REF,
4087 reg->get_type (),
4088 parent_pv.m_tree, index_pv.m_tree,
4089 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr),
4090 parent_pv.m_stack_depth);
4091 }
4092
4093 case RK_OFFSET:
4094 {
4095 const offset_region *offset_reg
4096 = as_a <const offset_region *> (reg);
4097 path_var parent_pv
4098 = get_representative_path_var (reg->get_parent_region (), visited);
4099 if (!parent_pv)
4100 return path_var (NULL_TREE(tree) nullptr, 0);
4101 path_var offset_pv
4102 = get_representative_path_var (offset_reg->get_byte_offset (),
4103 visited);
4104 if (!offset_pv || TREE_CODE (offset_pv.m_tree)((enum tree_code) (offset_pv.m_tree)->base.code) != INTEGER_CST)
4105 return path_var (NULL_TREE(tree) nullptr, 0);
4106 tree addr_parent = build1 (ADDR_EXPR,
4107 build_pointer_type (reg->get_type ()),
4108 parent_pv.m_tree);
4109 return path_var (build2 (MEM_REF,
4110 reg->get_type (),
4111 addr_parent, offset_pv.m_tree),
4112 parent_pv.m_stack_depth);
4113 }
4114
4115 case RK_SIZED:
4116 return path_var (NULL_TREE(tree) nullptr, 0);
4117
4118 case RK_CAST:
4119 {
4120 path_var parent_pv
4121 = get_representative_path_var (reg->get_parent_region (), visited);
4122 if (!parent_pv)
4123 return path_var (NULL_TREE(tree) nullptr, 0);
4124 return path_var (build1 (NOP_EXPR,
4125 reg->get_type (),
4126 parent_pv.m_tree),
4127 parent_pv.m_stack_depth);
4128 }
4129
4130 case RK_HEAP_ALLOCATED:
4131 case RK_ALLOCA:
4132 /* No good way to express heap-allocated/alloca regions as trees. */
4133 return path_var (NULL_TREE(tree) nullptr, 0);
4134
4135 case RK_STRING:
4136 {
4137 const string_region *string_reg = as_a <const string_region *> (reg);
4138 return path_var (string_reg->get_string_cst (), 0);
4139 }
4140
4141 case RK_VAR_ARG:
4142 case RK_ERRNO:
4143 case RK_UNKNOWN:
4144 return path_var (NULL_TREE(tree) nullptr, 0);
4145 }
4146}
4147
4148/* Attempt to return a path_var that represents REG, or return
4149 the NULL path_var.
4150 For example, a region for a field of a local would be a path_var
4151 wrapping a COMPONENT_REF.
4152 Use VISITED to prevent infinite mutual recursion with the overload for
4153 svalues.
4154
4155 This function defers to get_representative_path_var_1 to do the work;
4156 it adds verification that get_representative_path_var_1 returned a tree
4157 of the correct type. */
4158
4159path_var
4160region_model::get_representative_path_var (const region *reg,
4161 svalue_set *visited) const
4162{
4163 path_var result = get_representative_path_var_1 (reg, visited);
4164
4165 /* Verify that the result has the same type as REG, if any. */
4166 if (result.m_tree && reg->get_type ())
4167 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ())((void)(!(((contains_struct_check ((result.m_tree), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4167, __FUNCTION__))->typed.type) == reg->get_type ()
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4167, __FUNCTION__), 0 : 0))
;
4168
4169 return result;
4170}
4171
4172/* Update this model for any phis in SNODE, assuming we came from
4173 LAST_CFG_SUPEREDGE. */
4174
4175void
4176region_model::update_for_phis (const supernode *snode,
4177 const cfg_superedge *last_cfg_superedge,
4178 region_model_context *ctxt)
4179{
4180 gcc_assert (last_cfg_superedge)((void)(!(last_cfg_superedge) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4180, __FUNCTION__), 0 : 0))
;
4181
4182 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4183 are effectively handled simultaneously. */
4184 const region_model old_state (*this);
4185
4186 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4187 !gsi_end_p (gpi); gsi_next (&gpi))
4188 {
4189 gphi *phi = gpi.phi ();
4190
4191 tree src = last_cfg_superedge->get_phi_arg (phi);
4192 tree lhs = gimple_phi_result (phi);
4193
4194 /* Update next_state based on phi and old_state. */
4195 handle_phi (phi, lhs, src, old_state, ctxt);
4196 }
4197}
4198
4199/* Attempt to update this model for taking EDGE (where the last statement
4200 was LAST_STMT), returning true if the edge can be taken, false
4201 otherwise.
4202 When returning false, if OUT is non-NULL, write a new rejected_constraint
4203 to it.
4204
4205 For CFG superedges where LAST_STMT is a conditional or a switch
4206 statement, attempt to add the relevant conditions for EDGE to this
4207 model, returning true if they are feasible, or false if they are
4208 impossible.
4209
4210 For call superedges, push frame information and store arguments
4211 into parameters.
4212
4213 For return superedges, pop frame information and store return
4214 values into any lhs.
4215
4216 Rejection of call/return superedges happens elsewhere, in
4217 program_point::on_edge (i.e. based on program point, rather
4218 than program state). */
4219
4220bool
4221region_model::maybe_update_for_edge (const superedge &edge,
4222 const gimple *last_stmt,
4223 region_model_context *ctxt,
4224 rejected_constraint **out)
4225{
4226 /* Handle frame updates for interprocedural edges. */
4227 switch (edge.m_kind)
4228 {
4229 default:
4230 break;
4231
4232 case SUPEREDGE_CALL:
4233 {
4234 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4235 update_for_call_superedge (*call_edge, ctxt);
4236 }
4237 break;
4238
4239 case SUPEREDGE_RETURN:
4240 {
4241 const return_superedge *return_edge
4242 = as_a <const return_superedge *> (&edge);
4243 update_for_return_superedge (*return_edge, ctxt);
4244 }
4245 break;
4246
4247 case SUPEREDGE_INTRAPROCEDURAL_CALL:
4248 /* This is a no-op for call summaries; we should already
4249 have handled the effect of the call summary at the call stmt. */
4250 break;
4251 }
4252
4253 if (last_stmt == NULLnullptr)
4254 return true;
4255
4256 /* Apply any constraints for conditionals/switch statements. */
4257
4258 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4259 {
4260 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
4261 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
4262 }
4263
4264 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4265 {
4266 const switch_cfg_superedge *switch_sedge
4267 = as_a <const switch_cfg_superedge *> (&edge);
4268 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4269 ctxt, out);
4270 }
4271
4272 /* Apply any constraints due to an exception being thrown. */
4273 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4274 if (cfg_sedge->get_flags () & EDGE_EH)
4275 return apply_constraints_for_exception (last_stmt, ctxt, out);
4276
4277 return true;
4278}
4279
4280/* Push a new frame_region on to the stack region.
4281 Populate the frame_region with child regions for the function call's
4282 parameters, using values from the arguments at the callsite in the
4283 caller's frame. */
4284
4285void
4286region_model::update_for_gcall (const gcall *call_stmt,
4287 region_model_context *ctxt,
4288 function *callee)
4289{
4290 /* Build a vec of argument svalues, using the current top
4291 frame for resolving tree expressions. */
4292 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
4293
4294 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4295 {
4296 tree arg = gimple_call_arg (call_stmt, i);
4297 arg_svals.quick_push (get_rvalue (arg, ctxt));
4298 }
4299
4300 if(!callee)
4301 {
4302 /* Get the function * from the gcall. */
4303 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
4304 callee = DECL_STRUCT_FUNCTION (fn_decl)((tree_check ((fn_decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4304, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
4305 }
4306
4307 push_frame (callee, &arg_svals, ctxt);
4308}
4309
4310/* Pop the top-most frame_region from the stack, and copy the return
4311 region's values (if any) into the region for the lvalue of the LHS of
4312 the call (if any). */
4313
4314void
4315region_model::update_for_return_gcall (const gcall *call_stmt,
4316 region_model_context *ctxt)
4317{
4318 /* Get the lvalue for the result of the call, passing it to pop_frame,
4319 so that pop_frame can determine the region with respect to the
4320 *caller* frame. */
4321 tree lhs = gimple_call_lhs (call_stmt);
4322 pop_frame (lhs, NULLnullptr, ctxt);
4323}
4324
4325/* Extract calling information from the superedge and update the model for the
4326 call */
4327
4328void
4329region_model::update_for_call_superedge (const call_superedge &call_edge,
4330 region_model_context *ctxt)
4331{
4332 const gcall *call_stmt = call_edge.get_call_stmt ();
4333 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
4334}
4335
4336/* Extract calling information from the return superedge and update the model
4337 for the returning call */
4338
4339void
4340region_model::update_for_return_superedge (const return_superedge &return_edge,
4341 region_model_context *ctxt)
4342{
4343 const gcall *call_stmt = return_edge.get_call_stmt ();
4344 update_for_return_gcall (call_stmt, ctxt);
4345}
4346
4347/* Attempt to to use R to replay SUMMARY into this object.
4348 Return true if it is possible. */
4349
4350bool
4351region_model::replay_call_summary (call_summary_replay &r,
4352 const region_model &summary)
4353{
4354 gcc_assert (summary.get_stack_depth () == 1)((void)(!(summary.get_stack_depth () == 1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4354, __FUNCTION__), 0 : 0))
;
4355
4356 m_store.replay_call_summary (r, summary.m_store);
4357
4358 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4359 return false;
4360
4361 for (auto kv : summary.m_dynamic_extents)
4362 {
4363 const region *summary_reg = kv.first;
4364 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4365 if (!caller_reg)
4366 continue;
4367 const svalue *summary_sval = kv.second;
4368 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4369 if (!caller_sval)
4370 continue;
4371 m_dynamic_extents.put (caller_reg, caller_sval);
4372 }
4373
4374 return true;
4375}
4376
4377/* Given a true or false edge guarded by conditional statement COND_STMT,
4378 determine appropriate constraints for the edge to be taken.
4379
4380 If they are feasible, add the constraints and return true.
4381
4382 Return false if the constraints contradict existing knowledge
4383 (and so the edge should not be taken).
4384 When returning false, if OUT is non-NULL, write a new rejected_constraint
4385 to it. */
4386
4387bool
4388region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4389 const gcond *cond_stmt,
4390 region_model_context *ctxt,
4391 rejected_constraint **out)
4392{
4393 ::edge cfg_edge = sedge.get_cfg_edge ();
4394 gcc_assert (cfg_edge != NULL)((void)(!(cfg_edge != nullptr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4394, __FUNCTION__), 0 : 0))
;
4395 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))((void)(!(cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE
)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4395, __FUNCTION__), 0 : 0))
;
4396
4397 enum tree_code op = gimple_cond_code (cond_stmt);
4398 tree lhs = gimple_cond_lhs (cond_stmt);
4399 tree rhs = gimple_cond_rhs (cond_stmt);
4400 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4401 op = invert_tree_comparison (op, false /* honor_nans */);
4402 return add_constraint (lhs, op, rhs, ctxt, out);
4403}
4404
4405/* Return true iff SWITCH_STMT has a non-default label that contains
4406 INT_CST. */
4407
4408static bool
4409has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4410{
4411 /* We expect the initial label to be the default; skip it. */
4412 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL)((void)(!((*((const_cast<tree*> (tree_operand_check (((
tree_check ((gimple_switch_label (switch_stmt, 0)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4412, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4412, __FUNCTION__))))) == nullptr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4412, __FUNCTION__), 0 : 0))
;
4413 unsigned min_idx = 1;
4414 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4415
4416 /* Binary search: try to find the label containing INT_CST.
4417 This requires the cases to be sorted by CASE_LOW (done by the
4418 gimplifier). */
4419 while (max_idx >= min_idx)
4420 {
4421 unsigned case_idx = (min_idx + max_idx) / 2;
4422 tree label = gimple_switch_label (switch_stmt, case_idx);
4423 tree low = CASE_LOW (label)(*((const_cast<tree*> (tree_operand_check (((tree_check
((label), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4423, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4423, __FUNCTION__)))))
;
4424 gcc_assert (low)((void)(!(low) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4424, __FUNCTION__), 0 : 0))
;
4425 tree high = CASE_HIGH (label)(*((const_cast<tree*> (tree_operand_check (((tree_check
((label), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4425, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4425, __FUNCTION__)))))
;
4426 if (!high)
4427 high = low;
4428 if (tree_int_cst_compare (int_cst, low) < 0)
4429 {
4430 /* INT_CST is below the range of this label. */
4431 gcc_assert (case_idx > 0)((void)(!(case_idx > 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4431, __FUNCTION__), 0 : 0))
;
4432 max_idx = case_idx - 1;
4433 }
4434 else if (tree_int_cst_compare (int_cst, high) > 0)
4435 {
4436 /* INT_CST is above the range of this case. */
4437 min_idx = case_idx + 1;
4438 }
4439 else
4440 /* This case contains INT_CST. */
4441 return true;
4442 }
4443 /* Not found. */
4444 return false;
4445}
4446
4447/* Return true iff SWITCH_STMT (which must be on an enum value)
4448 has nondefault cases handling all values in the enum. */
4449
4450static bool
4451has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4452{
4453 gcc_assert (switch_stmt)((void)(!(switch_stmt) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4453, __FUNCTION__), 0 : 0))
;
4454 tree type = TREE_TYPE (gimple_switch_index (switch_stmt))((contains_struct_check ((gimple_switch_index (switch_stmt)),
(TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4454, __FUNCTION__))->typed.type)
;
4455 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE)((void)(!(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4455, __FUNCTION__), 0 : 0))
;
4456
4457 for (tree enum_val_iter = TYPE_VALUES (type)((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4457, __FUNCTION__, (ENUMERAL_TYPE)))->type_non_common.values
)
;
4458 enum_val_iter;
4459 enum_val_iter = TREE_CHAIN (enum_val_iter)((contains_struct_check ((enum_val_iter), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4459, __FUNCTION__))->common.chain)
)
4460 {
4461 tree enum_val = TREE_VALUE (enum_val_iter)((tree_check ((enum_val_iter), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4461, __FUNCTION__, (TREE_LIST)))->list.value)
;
4462 gcc_assert (TREE_CODE (enum_val) == CONST_DECL)((void)(!(((enum tree_code) (enum_val)->base.code) == CONST_DECL
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4462, __FUNCTION__), 0 : 0))
;
4463 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST)((void)(!(((enum tree_code) (((contains_struct_check ((enum_val
), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4463, __FUNCTION__))->decl_common.initial))->base.code
) == INTEGER_CST) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4463, __FUNCTION__), 0 : 0))
;
4464 if (!has_nondefault_case_for_value_p (switch_stmt,
4465 DECL_INITIAL (enum_val)((contains_struct_check ((enum_val), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4465, __FUNCTION__))->decl_common.initial)
))
4466 return false;
4467 }
4468 return true;
4469}
4470
4471/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4472 for the edge to be taken.
4473
4474 If they are feasible, add the constraints and return true.
4475
4476 Return false if the constraints contradict existing knowledge
4477 (and so the edge should not be taken).
4478 When returning false, if OUT is non-NULL, write a new rejected_constraint
4479 to it. */
4480
4481bool
4482region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4483 const gswitch *switch_stmt,
4484 region_model_context *ctxt,
4485 rejected_constraint **out)
4486{
4487 tree index = gimple_switch_index (switch_stmt);
4488 const svalue *index_sval = get_rvalue (index, ctxt);
4489
4490 /* If we're switching based on an enum type, assume that the user is only
4491 working with values from the enum. Hence if this is an
4492 implicitly-created "default", assume it doesn't get followed.
4493 This fixes numerous "uninitialized" false positives where we otherwise
4494 consider jumping past the initialization cases. */
4495
4496 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
4497 ctxt
4498 /* Must be an enum value. */
4499 && index_sval->get_type ()
4500 && TREE_CODE (TREE_TYPE (index))((enum tree_code) (((contains_struct_check ((index), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4500, __FUNCTION__))->typed.type))->base.code)
== ENUMERAL_TYPE
4501 && TREE_CODE (index_sval->get_type ())((enum tree_code) (index_sval->get_type ())->base.code) == ENUMERAL_TYPE
4502 /* If we have a constant, then we can check it directly. */
4503 && index_sval->get_kind () != SK_CONSTANT
4504 && edge.implicitly_created_default_p ()
4505 && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
4506 /* Don't do this if there's a chance that the index is
4507 attacker-controlled. */
4508 && !ctxt->possibly_tainted_p (index_sval))
4509 {
4510 if (out)
4511 *out = new rejected_default_case (*this);
4512 return false;
4513 }
4514
4515 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4516 const bounded_ranges *all_cases_ranges
4517 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
4518 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4519 if (!sat && out)
4520 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
4521 if (sat && ctxt && !all_cases_ranges->empty_p ())
4522 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
4523 return sat;
4524}
4525
4526/* Apply any constraints due to an exception being thrown at LAST_STMT.
4527
4528 If they are feasible, add the constraints and return true.
4529
4530 Return false if the constraints contradict existing knowledge
4531 (and so the edge should not be taken).
4532 When returning false, if OUT is non-NULL, write a new rejected_constraint
4533 to it. */
4534
4535bool
4536region_model::apply_constraints_for_exception (const gimple *last_stmt,
4537 region_model_context *ctxt,
4538 rejected_constraint **out)
4539{
4540 gcc_assert (last_stmt)((void)(!(last_stmt) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4540, __FUNCTION__), 0 : 0))
;
4541 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4542 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4543 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4544 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4545 {
4546 /* We have an exception thrown from operator new.
4547 Add a constraint that the result was NULL, to avoid a false
4548 leak report due to the result being lost when following
4549 the EH edge. */
4550 if (tree lhs = gimple_call_lhs (call))
4551 return add_constraint (lhs, EQ_EXPR, null_pointer_nodeglobal_trees[TI_NULL_POINTER], ctxt, out);
4552 return true;
4553 }
4554 return true;
4555}
4556
4557/* For use with push_frame when handling a top-level call within the analysis.
4558 PARAM has a defined but unknown initial value.
4559 Anything it points to has escaped, since the calling context "knows"
4560 the pointer, and thus calls to unknown functions could read/write into
4561 the region.
4562 If NONNULL is true, then assume that PARAM must be non-NULL. */
4563
4564void
4565region_model::on_top_level_param (tree param,
4566 bool nonnull,
4567 region_model_context *ctxt)
4568{
4569 if (POINTER_TYPE_P (TREE_TYPE (param))(((enum tree_code) (((contains_struct_check ((param), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4569, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((param), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4569, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4570 {
4571 const region *param_reg = get_lvalue (param, ctxt);
4572 const svalue *init_ptr_sval
4573 = m_mgr->get_or_create_initial_value (param_reg);
4574 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4575 m_store.mark_as_escaped (pointee_reg);
4576 if (nonnull)
4577 {
4578 const svalue *null_ptr_sval
4579 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param)((contains_struct_check ((param), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4579, __FUNCTION__))->typed.type)
);
4580 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
4581 }
4582 }
4583}
4584
4585/* Update this region_model to reflect pushing a frame onto the stack
4586 for a call to FUN.
4587
4588 If ARG_SVALS is non-NULL, use it to populate the parameters
4589 in the new frame.
4590 Otherwise, the params have their initial_svalues.
4591
4592 Return the frame_region for the new frame. */
4593
4594const region *
4595region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4596 region_model_context *ctxt)
4597{
4598 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4599 if (arg_svals)
4600 {
4601 /* Arguments supplied from a caller frame. */
4602 tree fndecl = fun->decl;
4603 unsigned idx = 0;
4604 for (tree iter_parm = DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4604, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; iter_parm;
4605 iter_parm = DECL_CHAIN (iter_parm)(((contains_struct_check (((contains_struct_check ((iter_parm
), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4605, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4605, __FUNCTION__))->common.chain))
, ++idx)
4606 {
4607 /* If there's a mismatching declaration, the call stmt might
4608 not have enough args. Handle this case by leaving the
4609 rest of the params as uninitialized. */
4610 if (idx >= arg_svals->length ())
4611 break;
4612 tree parm_lval = iter_parm;
4613 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4614 parm_lval = parm_default_ssa;
4615 const region *parm_reg = get_lvalue (parm_lval, ctxt);
4616 const svalue *arg_sval = (*arg_svals)[idx];
4617 set_value (parm_reg, arg_sval, ctxt);
4618 }
4619
4620 /* Handle any variadic args. */
4621 unsigned va_arg_idx = 0;
4622 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4623 {
4624 const svalue *arg_sval = (*arg_svals)[idx];
4625 const region *var_arg_reg
4626 = m_mgr->get_var_arg_region (m_current_frame,
4627 va_arg_idx);
4628 set_value (var_arg_reg, arg_sval, ctxt);
4629 }
4630 }
4631 else
4632 {
4633 /* Otherwise we have a top-level call within the analysis. The params
4634 have defined but unknown initial values.
4635 Anything they point to has escaped. */
4636 tree fndecl = fun->decl;
4637
4638 /* Handle "__attribute__((nonnull))". */
4639 tree fntype = TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4639, __FUNCTION__))->typed.type)
;
4640 bitmap nonnull_args = get_nonnull_args (fntype);
4641
4642 unsigned parm_idx = 0;
4643 for (tree iter_parm = DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4643, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; iter_parm;
4644 iter_parm = DECL_CHAIN (iter_parm)(((contains_struct_check (((contains_struct_check ((iter_parm
), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4644, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4644, __FUNCTION__))->common.chain))
)
4645 {
4646 bool non_null = (nonnull_args
4647 ? (bitmap_empty_p (nonnull_args)
4648 || bitmap_bit_p (nonnull_args, parm_idx))
4649 : false);
4650 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4651 on_top_level_param (parm_default_ssa, non_null, ctxt);
4652 else
4653 on_top_level_param (iter_parm, non_null, ctxt);
4654 parm_idx++;
4655 }
4656
4657 BITMAP_FREE (nonnull_args)((void) (bitmap_obstack_free ((bitmap) nonnull_args), (nonnull_args
) = (bitmap) nullptr))
;
4658 }
4659
4660 return m_current_frame;
4661}
4662
4663/* Get the function of the top-most frame in this region_model's stack.
4664 There must be such a frame. */
4665
4666function *
4667region_model::get_current_function () const
4668{
4669 const frame_region *frame = get_current_frame ();
4670 gcc_assert (frame)((void)(!(frame) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4670, __FUNCTION__), 0 : 0))
;
4671 return frame->get_function ();
4672}
4673
4674/* Pop the topmost frame_region from this region_model's stack;
4675
4676 If RESULT_LVALUE is non-null, copy any return value from the frame
4677 into the corresponding region (evaluated with respect to the *caller*
4678 frame, rather than the called frame).
4679 If OUT_RESULT is non-null, copy any return value from the frame
4680 into *OUT_RESULT.
4681
4682 If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
4683 This is for use when unwinding frames e.g. due to longjmp, to suppress
4684 erroneously reporting uninitialized return values.
4685
4686 Purge the frame region and all its descendent regions.
4687 Convert any pointers that point into such regions into
4688 POISON_KIND_POPPED_STACK svalues. */
4689
4690void
4691region_model::pop_frame (tree result_lvalue,
4692 const svalue **out_result,
4693 region_model_context *ctxt,
4694 bool eval_return_svalue)
4695{
4696 gcc_assert (m_current_frame)((void)(!(m_current_frame) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4696, __FUNCTION__), 0 : 0))
;
4697
4698 const frame_region *frame_reg = m_current_frame;
4699
4700 /* Notify state machines. */
4701 if (ctxt)
4702 ctxt->on_pop_frame (frame_reg);
4703
4704 /* Evaluate the result, within the callee frame. */
4705 tree fndecl = m_current_frame->get_function ()->decl;
4706 tree result = DECL_RESULT (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4706, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
;
4707 const svalue *retval = NULLnullptr;
4708 if (result
4709 && TREE_TYPE (result)((contains_struct_check ((result), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4709, __FUNCTION__))->typed.type)
!= void_type_nodeglobal_trees[TI_VOID_TYPE]
4710 && eval_return_svalue)
4711 {
4712 retval = get_rvalue (result, ctxt);
4713 if (out_result)
4714 *out_result = retval;
4715 }
4716
4717 /* Pop the frame. */
4718 m_current_frame = m_current_frame->get_calling_frame ();
4719
4720 if (result_lvalue && retval)
4721 {
4722 gcc_assert (eval_return_svalue)((void)(!(eval_return_svalue) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4722, __FUNCTION__), 0 : 0))
;
4723
4724 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4725 the frame, but before poisoning pointers into the old frame. */
4726 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4727 set_value (result_dst_reg, retval, ctxt);
4728 }
4729
4730 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
4731}
4732
4733/* Get the number of frames in this region_model's stack. */
4734
4735int
4736region_model::get_stack_depth () const
4737{
4738 const frame_region *frame = get_current_frame ();
4739 if (frame)
4740 return frame->get_stack_depth ();
4741 else
4742 return 0;
4743}
4744
4745/* Get the frame_region with the given index within the stack.
4746 The frame_region must exist. */
4747
4748const frame_region *
4749region_model::get_frame_at_index (int index) const
4750{
4751 const frame_region *frame = get_current_frame ();
4752 gcc_assert (frame)((void)(!(frame) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4752, __FUNCTION__), 0 : 0))
;
4753 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4753, __FUNCTION__), 0 : 0))
;
4754 gcc_assert (index <= frame->get_index ())((void)(!(index <= frame->get_index ()) ? fancy_abort (
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4754, __FUNCTION__), 0 : 0))
;
4755 while (index != frame->get_index ())
4756 {
4757 frame = frame->get_calling_frame ();
4758 gcc_assert (frame)((void)(!(frame) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4758, __FUNCTION__), 0 : 0))
;
4759 }
4760 return frame;
4761}
4762
4763/* Unbind svalues for any regions in REG and below.
4764 Find any pointers to such regions; convert them to
4765 poisoned values of kind PKIND.
4766 Also purge any dynamic extents. */
4767
4768void
4769region_model::unbind_region_and_descendents (const region *reg,
4770 enum poison_kind pkind)
4771{
4772 /* Gather a set of base regions to be unbound. */
4773 hash_set<const region *> base_regs;
4774 for (store::cluster_map_t::iterator iter = m_store.begin ();
4775 iter != m_store.end (); ++iter)
4776 {
4777 const region *iter_base_reg = (*iter).first;
4778 if (iter_base_reg->descendent_of_p (reg))
4779 base_regs.add (iter_base_reg);
4780 }
4781 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4782 iter != base_regs.end (); ++iter)
4783 m_store.purge_cluster (*iter);
4784
4785 /* Find any pointers to REG or its descendents; convert to poisoned. */
4786 poison_any_pointers_to_descendents (reg, pkind);
4787
4788 /* Purge dynamic extents of any base regions in REG and below
4789 (e.g. VLAs and alloca stack regions). */
4790 for (auto iter : m_dynamic_extents)
4791 {
4792 const region *iter_reg = iter.first;
4793 if (iter_reg->descendent_of_p (reg))
4794 unset_dynamic_extents (iter_reg);
4795 }
4796}
4797
4798/* Implementation of BindingVisitor.
4799 Update the bound svalues for regions below REG to use poisoned
4800 values instead. */
4801
4802struct bad_pointer_finder
4803{
4804 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4805 region_model_manager *mgr)
4806 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4807 {}
4808
4809 void on_binding (const binding_key *, const svalue *&sval)
4810 {
4811 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4812 {
4813 const region *ptr_dst = ptr_sval->get_pointee ();
4814 /* Poison ptrs to descendents of REG, but not to REG itself,
4815 otherwise double-free detection doesn't work (since sm-state
4816 for "free" is stored on the original ptr svalue). */
4817 if (ptr_dst->descendent_of_p (m_reg)
4818 && ptr_dst != m_reg)
4819 {
4820 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4821 sval->get_type ());
4822 ++m_count;
4823 }
4824 }
4825 }
4826
4827 const region *m_reg;
4828 enum poison_kind m_pkind;
4829 region_model_manager *const m_mgr;
4830 int m_count;
4831};
4832
4833/* Find any pointers to REG or its descendents; convert them to
4834 poisoned values of kind PKIND.
4835 Return the number of pointers that were poisoned. */
4836
4837int
4838region_model::poison_any_pointers_to_descendents (const region *reg,
4839 enum poison_kind pkind)
4840{
4841 bad_pointer_finder bv (reg, pkind, m_mgr);
4842 m_store.for_each_binding (bv);
4843 return bv.m_count;
4844}
4845
4846/* Attempt to merge THIS with OTHER_MODEL, writing the result
4847 to OUT_MODEL. Use POINT to distinguish values created as a
4848 result of merging. */
4849
4850bool
4851region_model::can_merge_with_p (const region_model &other_model,
4852 const program_point &point,
4853 region_model *out_model,
4854 const extrinsic_state *ext_state,
4855 const program_state *state_a,
4856 const program_state *state_b) const
4857{
4858 gcc_assert (out_model)((void)(!(out_model) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4858, __FUNCTION__), 0 : 0))
;
4859 gcc_assert (m_mgr == other_model.m_mgr)((void)(!(m_mgr == other_model.m_mgr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4859, __FUNCTION__), 0 : 0))
;
4860 gcc_assert (m_mgr == out_model->m_mgr)((void)(!(m_mgr == out_model->m_mgr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4860, __FUNCTION__), 0 : 0))
;
4861
4862 if (m_current_frame != other_model.m_current_frame)
4863 return false;
4864 out_model->m_current_frame = m_current_frame;
4865
4866 model_merger m (this, &other_model, point, out_model,
4867 ext_state, state_a, state_b);
4868
4869 if (!store::can_merge_p (&m_store, &other_model.m_store,
4870 &out_model->m_store, m_mgr->get_store_manager (),
4871 &m))
4872 return false;
4873
4874 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4875 &out_model->m_dynamic_extents))
4876 return false;
4877
4878 /* Merge constraints. */
4879 constraint_manager::merge (*m_constraints,
4880 *other_model.m_constraints,
4881 out_model->m_constraints);
4882
4883 return true;
4884}
4885
4886/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4887 otherwise. */
4888
4889tree
4890region_model::get_fndecl_for_call (const gcall *call,
4891 region_model_context *ctxt)
4892{
4893 tree fn_ptr = gimple_call_fn (call);
4894 if (fn_ptr == NULL_TREE(tree) nullptr)
4895 return NULL_TREE(tree) nullptr;
4896 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4897 if (const region_svalue *fn_ptr_ptr
4898 = fn_ptr_sval->dyn_cast_region_svalue ())
4899 {
4900 const region *reg = fn_ptr_ptr->get_pointee ();
4901 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
4902 {
4903 tree fn_decl = fn_reg->get_fndecl ();
4904 cgraph_node *node = cgraph_node::get (fn_decl);
4905 if (!node)
4906 return NULL_TREE(tree) nullptr;
4907 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
4908 if (ultimate_node)
4909 return ultimate_node->decl;
4910 }
4911 }
4912
4913 return NULL_TREE(tree) nullptr;
4914}
4915
4916/* Would be much simpler to use a lambda here, if it were supported. */
4917
struct append_regions_cb_data
{
  /* The model whose current frame filters which regions are appended.  */
  const region_model *model;
  /* Where to accumulate the decl_regions found.  */
  auto_vec<const decl_region *> *out;
};
4923
4924/* Populate *OUT with all decl_regions in the current
4925 frame that have clusters within the store. */
4926
4927void
4928region_model::
4929get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
4930{
4931 append_regions_cb_data data;
4932 data.model = this;
4933 data.out = out;
4934 m_store.for_each_cluster (append_regions_cb, &data);
4935}
4936
4937/* Implementation detail of get_regions_for_current_frame. */
4938
4939void
4940region_model::append_regions_cb (const region *base_reg,
4941 append_regions_cb_data *cb_data)
4942{
4943 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4944 return;
4945 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
4946 cb_data->out->safe_push (decl_reg);
4947}
4948
4949
4950/* Abstract class for diagnostics related to the use of
4951 floating-point arithmetic where precision is needed. */
4952
class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  /* All such diagnostics are controlled by
     -Wanalyzer-imprecise-fp-arithmetic.  */
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};
4961
4962/* Concrete diagnostic to complain about uses of floating-point arithmetic
4963 in the size argument of malloc etc. */
4964
4965class float_as_size_arg : public imprecise_floating_point_arithmetic
4966{
4967public:
4968 float_as_size_arg (tree arg) : m_arg (arg)
4969 {}
4970
4971 const char *get_kind () const final override
4972 {
4973 return "float_as_size_arg_diagnostic";
4974 }
4975
4976 bool subclass_equal_p (const pending_diagnostic &other) const final override
4977 {
4978 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4979 }
4980
4981 bool emit (rich_location *rich_loc) final override
4982 {
4983 diagnostic_metadata m;
4984 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4985 "use of floating-point arithmetic here might"
4986 " yield unexpected results");
4987 if (warned)
4988 inform (rich_loc->get_loc (), "only use operands of an integer type"
4989 " inside the size argument");
4990 return warned;
4991 }
4992
4993 label_text describe_final_event (const evdesc::final_event &ev) final
4994 override
4995 {
4996 if (m_arg)
4997 return ev.formatted_print ("operand %qE is of type %qT",
4998 m_arg, TREE_TYPE (m_arg)((contains_struct_check ((m_arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 4998, __FUNCTION__))->typed.type)
);
4999 return ev.formatted_print ("at least one operand of the size argument is"
5000 " of a floating-point type");
5001 }
5002
5003private:
5004 tree m_arg;
5005};
5006
5007/* Visitor to find uses of floating-point variables/constants in an svalue. */
5008
5009class contains_floating_point_visitor : public visitor
5010{
5011public:
5012 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULLnullptr)
5013 {
5014 root_sval->accept (this);
5015 }
5016
5017 const svalue *get_svalue_to_report ()
5018 {
5019 return m_result;
5020 }
5021
5022 void visit_constant_svalue (const constant_svalue *sval) final override
5023 {
5024 /* At the point the analyzer runs, constant integer operands in a floating
5025 point expression are already implictly converted to floating-points.
5026 Thus, we do prefer to report non-constants such that the diagnostic
5027 always reports a floating-point operand. */
5028 tree type = sval->get_type ();
5029 if (type && FLOAT_TYPE_P (type)((((enum tree_code) (type)->base.code) == REAL_TYPE) || ((
((enum tree_code) (type)->base.code) == COMPLEX_TYPE || ((
(enum tree_code) (type)->base.code) == VECTOR_TYPE)) &&
(((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5029, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
&& !m_result)
5030 m_result = sval;
5031 }
5032
5033 void visit_conjured_svalue (const conjured_svalue *sval) final override
5034 {
5035 tree type = sval->get_type ();
5036 if (type && FLOAT_TYPE_P (type)((((enum tree_code) (type)->base.code) == REAL_TYPE) || ((
((enum tree_code) (type)->base.code) == COMPLEX_TYPE || ((
(enum tree_code) (type)->base.code) == VECTOR_TYPE)) &&
(((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5036, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
)
5037 m_result = sval;
5038 }
5039
5040 void visit_initial_svalue (const initial_svalue *sval) final override
5041 {
5042 tree type = sval->get_type ();
5043 if (type && FLOAT_TYPE_P (type)((((enum tree_code) (type)->base.code) == REAL_TYPE) || ((
((enum tree_code) (type)->base.code) == COMPLEX_TYPE || ((
(enum tree_code) (type)->base.code) == VECTOR_TYPE)) &&
(((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5043, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)))
)
5044 m_result = sval;
5045 }
5046
5047private:
5048 /* Non-null if at least one floating-point operand was found. */
5049 const svalue *m_result;
5050};
5051
5052/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5053
5054void
5055region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5056 region_model_context *ctxt) const
5057{
5058 gcc_assert (ctxt)((void)(!(ctxt) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5058, __FUNCTION__), 0 : 0))
;
5059
5060 contains_floating_point_visitor v (size_in_bytes);
5061 if (const svalue *float_sval = v.get_svalue_to_report ())
5062 {
5063 tree diag_arg = get_representative_tree (float_sval);
5064 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
5065 }
5066}
5067
5068/* Return a region describing a heap-allocated block of memory.
5069 Use CTXT to complain about tainted sizes.
5070
5071 Reuse an existing heap_allocated_region if it's not being referenced by
5072 this region_model; otherwise create a new one. */
5073
5074const region *
5075region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
5076 region_model_context *ctxt)
5077{
5078 /* Determine which regions are referenced in this region_model, so that
5079 we can reuse an existing heap_allocated_region if it's not in use on
5080 this path. */
5081 auto_bitmap base_regs_in_use;
5082 get_referenced_base_regions (base_regs_in_use);
5083
5084 /* Don't reuse regions that are marked as TOUCHED. */
5085 for (store::cluster_map_t::iterator iter = m_store.begin ();
5086 iter != m_store.end (); ++iter)
5087 if ((*iter).second->touched_p ())
5088 {
5089 const region *base_reg = (*iter).first;
5090 bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
5091 }
5092
5093 const region *reg
5094 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
5095 if (size_in_bytes)
5096 if (compat_types_p (size_in_bytes->get_type (), size_type_nodeglobal_trees[TI_SIZE_TYPE]))
5097 set_dynamic_extents (reg, size_in_bytes, ctxt);
5098 return reg;
5099}
5100
5101/* Populate OUT_IDS with the set of IDs of those base regions which are
5102 reachable in this region_model. */
5103
5104void
5105region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
5106{
5107 reachable_regions reachable_regs (const_cast<region_model *> (this));
5108 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
5109 &reachable_regs);
5110 /* Get regions for locals that have explicitly bound values. */
5111 for (store::cluster_map_t::iterator iter = m_store.begin ();
5112 iter != m_store.end (); ++iter)
5113 {
5114 const region *base_reg = (*iter).first;
5115 if (const region *parent = base_reg->get_parent_region ())
5116 if (parent->get_kind () == RK_FRAME)
5117 reachable_regs.add (base_reg, false);
5118 }
5119
5120 bitmap_clear (out_ids);
5121 for (auto iter_reg : reachable_regs)
5122 bitmap_set_bit (out_ids, iter_reg->get_id ());
5123}
5124
5125/* Return a new region describing a block of memory allocated within the
5126 current frame.
5127 Use CTXT to complain about tainted sizes. */
5128
5129const region *
5130region_model::create_region_for_alloca (const svalue *size_in_bytes,
5131 region_model_context *ctxt)
5132{
5133 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5134 if (compat_types_p (size_in_bytes->get_type (), size_type_nodeglobal_trees[TI_SIZE_TYPE]))
5135 set_dynamic_extents (reg, size_in_bytes, ctxt);
5136 return reg;
5137}
5138
5139/* Record that the size of REG is SIZE_IN_BYTES.
5140 Use CTXT to complain about tainted sizes. */
5141
5142void
5143region_model::set_dynamic_extents (const region *reg,
5144 const svalue *size_in_bytes,
5145 region_model_context *ctxt)
5146{
5147 assert_compat_types (size_in_bytes->get_type (), size_type_nodeglobal_trees[TI_SIZE_TYPE]);
5148 if (ctxt)
5149 {
5150 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5151 ctxt);
5152 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5153 }
5154 m_dynamic_extents.put (reg, size_in_bytes);
5155}
5156
5157/* Get the recording of REG in bytes, or NULL if no dynamic size was
5158 recorded. */
5159
5160const svalue *
5161region_model::get_dynamic_extents (const region *reg) const
5162{
5163 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5164 return *slot;
5165 return NULLnullptr;
5166}
5167
5168/* Unset any recorded dynamic size of REG. */
5169
void
region_model::unset_dynamic_extents (const region *reg)
{
  /* Harmless if REG has no recorded extent.  */
  m_dynamic_extents.remove (reg);
}
5175
5176/* Information of the layout of a RECORD_TYPE, capturing it as a vector
5177 of items, where each item is either a field or padding. */
5178
5179class record_layout
5180{
5181public:
5182 /* An item within a record; either a field, or padding after a field. */
5183 struct item
5184 {
5185 public:
5186 item (const bit_range &br,
5187 tree field,
5188 bool is_padding)
5189 : m_bit_range (br),
5190 m_field (field),
5191 m_is_padding (is_padding)
5192 {
5193 }
5194
5195 bit_offset_t get_start_bit_offset () const
5196 {
5197 return m_bit_range.get_start_bit_offset ();
5198 }
5199 bit_offset_t get_next_bit_offset () const
5200 {
5201 return m_bit_range.get_next_bit_offset ();
5202 }
5203
5204 bool contains_p (bit_offset_t offset) const
5205 {
5206 return m_bit_range.contains_p (offset);
5207 }
5208
5209 void dump_to_pp (pretty_printer *pp) const
5210 {
5211 if (m_is_padding)
5212 pp_printf (pp, "padding after %qD", m_field);
5213 else
5214 pp_printf (pp, "%qD", m_field);
5215 pp_string (pp, ", ");
5216 m_bit_range.dump_to_pp (pp);
5217 }
5218
5219 bit_range m_bit_range;
5220 tree m_field;
5221 bool m_is_padding;
5222 };
5223
5224 record_layout (tree record_type)
5225 {
5226 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE)((void)(!(((enum tree_code) (record_type)->base.code) == RECORD_TYPE
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5226, __FUNCTION__), 0 : 0))
;
5227
5228 for (tree iter = TYPE_FIELDS (record_type)((tree_check3 ((record_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5228, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
; iter != NULL_TREE(tree) nullptr;
5229 iter = DECL_CHAIN (iter)(((contains_struct_check (((contains_struct_check ((iter), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5229, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5229, __FUNCTION__))->common.chain))
)
5230 {
5231 if (TREE_CODE (iter)((enum tree_code) (iter)->base.code) == FIELD_DECL)
5232 {
5233 int iter_field_offset = int_bit_position (iter);
5234 bit_size_t size_in_bits;
5235 if (!int_size_in_bits (TREE_TYPE (iter)((contains_struct_check ((iter), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5235, __FUNCTION__))->typed.type)
, &size_in_bits))
5236 size_in_bits = 0;
5237
5238 maybe_pad_to (iter_field_offset);
5239
5240 /* Add field. */
5241 m_items.safe_push (item (bit_range (iter_field_offset,
5242 size_in_bits),
5243 iter, false));
5244 }
5245 }
5246
5247 /* Add any trailing padding. */
5248 bit_size_t size_in_bits;
5249 if (int_size_in_bits (record_type, &size_in_bits))
5250 maybe_pad_to (size_in_bits);
5251 }
5252
5253 void dump_to_pp (pretty_printer *pp) const
5254 {
5255 unsigned i;
5256 item *it;
5257 FOR_EACH_VEC_ELT (m_items, i, it)for (i = 0; (m_items).iterate ((i), &(it)); ++(i))
5258 {
5259 it->dump_to_pp (pp);
5260 pp_newline (pp);
5261 }
5262 }
5263
5264 DEBUG_FUNCTION__attribute__ ((__used__)) void dump () const
5265 {
5266 pretty_printer pp;
5267 pp_format_decoder (&pp)(&pp)->format_decoder = default_tree_printer;
5268 pp.buffer->stream = stderrstderr;
5269 dump_to_pp (&pp);
5270 pp_flush (&pp);
5271 }
5272
5273 const record_layout::item *get_item_at (bit_offset_t offset) const
5274 {
5275 unsigned i;
5276 item *it;
5277 FOR_EACH_VEC_ELT (m_items, i, it)for (i = 0; (m_items).iterate ((i), &(it)); ++(i))
5278 if (it->contains_p (offset))
5279 return it;
5280 return NULLnullptr;
5281 }
5282
5283private:
5284 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5285
5286 void maybe_pad_to (bit_offset_t next_offset)
5287 {
5288 if (m_items.length () > 0)
5289 {
5290 const item &last_item = m_items[m_items.length () - 1];
5291 bit_offset_t offset_after_last_item
5292 = last_item.get_next_bit_offset ();
5293 if (next_offset > offset_after_last_item)
5294 {
5295 bit_size_t padding_size
5296 = next_offset - offset_after_last_item;
5297 m_items.safe_push (item (bit_range (offset_after_last_item,
5298 padding_size),
5299 last_item.m_field, true));
5300 }
5301 }
5302 }
5303
5304 auto_vec<item> m_items;
5305};
5306
5307/* A subclass of pending_diagnostic for complaining about uninitialized data
5308 being copied across a trust boundary to an untrusted output
5309 (e.g. copy_to_user infoleaks in the Linux kernel). */
5310
5311class exposure_through_uninit_copy
5312 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5313{
5314public:
5315 exposure_through_uninit_copy (const region *src_region,
5316 const region *dest_region,
5317 const svalue *copied_sval)
5318 : m_src_region (src_region),
5319 m_dest_region (dest_region),
5320 m_copied_sval (copied_sval)
5321 {
5322 gcc_assert (m_copied_sval->get_kind () == SK_POISONED((void)(!(m_copied_sval->get_kind () == SK_POISONED || m_copied_sval
->get_kind () == SK_COMPOUND) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5323, __FUNCTION__), 0 : 0))
5323 || m_copied_sval->get_kind () == SK_COMPOUND)((void)(!(m_copied_sval->get_kind () == SK_POISONED || m_copied_sval
->get_kind () == SK_COMPOUND) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5323, __FUNCTION__), 0 : 0))
;
5324 }
5325
5326 const char *get_kind () const final override
5327 {
5328 return "exposure_through_uninit_copy";
5329 }
5330
5331 bool operator== (const exposure_through_uninit_copy &other) const
5332 {
5333 return (m_src_region == other.m_src_region
5334 && m_dest_region == other.m_dest_region
5335 && m_copied_sval == other.m_copied_sval);
5336 }
5337
5338 int get_controlling_option () const final override
5339 {
5340 return OPT_Wanalyzer_exposure_through_uninit_copy;
5341 }
5342
5343 bool emit (rich_location *rich_loc) final override
5344 {
5345 diagnostic_metadata m;
5346 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5347 m.add_cwe (200);
5348 enum memory_space mem_space = get_src_memory_space ();
5349 bool warned;
5350 switch (mem_space)
5351 {
5352 default:
5353 warned = warning_meta
5354 (rich_loc, m, get_controlling_option (),
5355 "potential exposure of sensitive information"
5356 " by copying uninitialized data across trust boundary");
5357 break;
5358 case MEMSPACE_STACK:
5359 warned = warning_meta
5360 (rich_loc, m, get_controlling_option (),
5361 "potential exposure of sensitive information"
5362 " by copying uninitialized data from stack across trust boundary");
5363 break;
5364 case MEMSPACE_HEAP:
5365 warned = warning_meta
5366 (rich_loc, m, get_controlling_option (),
5367 "potential exposure of sensitive information"
5368 " by copying uninitialized data from heap across trust boundary");
5369 break;
5370 }
5371 if (warned)
5372 {
5373 location_t loc = rich_loc->get_loc ();
5374 inform_number_of_uninit_bits (loc);
5375 complain_about_uninit_ranges (loc);
5376
5377 if (mem_space == MEMSPACE_STACK)
5378 maybe_emit_fixit_hint ();
5379 }
5380 return warned;
5381 }
5382
5383 label_text describe_final_event (const evdesc::final_event &) final override
5384 {
5385 enum memory_space mem_space = get_src_memory_space ();
5386 switch (mem_space)
5387 {
5388 default:
5389 return label_text::borrow ("uninitialized data copied here");
5390
5391 case MEMSPACE_STACK:
5392 return label_text::borrow ("uninitialized data copied from stack here");
5393
5394 case MEMSPACE_HEAP:
5395 return label_text::borrow ("uninitialized data copied from heap here");
5396 }
5397 }
5398
5399 void mark_interesting_stuff (interesting_t *interest) final override
5400 {
5401 if (m_src_region)
5402 interest->add_region_creation (m_src_region);
5403 }
5404
5405private:
5406 enum memory_space get_src_memory_space () const
5407 {
5408 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5409 }
5410
5411 bit_size_t calc_num_uninit_bits () const
5412 {
5413 switch (m_copied_sval->get_kind ())
5414 {
5415 default:
5416 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5416, __FUNCTION__))
;
5417 break;
5418 case SK_POISONED:
5419 {
5420 const poisoned_svalue *poisoned_sval
5421 = as_a <const poisoned_svalue *> (m_copied_sval);
5422 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT)((void)(!(poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5422, __FUNCTION__), 0 : 0))
;
5423
5424 /* Give up if don't have type information. */
5425 if (m_copied_sval->get_type () == NULL_TREE(tree) nullptr)
5426 return 0;
5427
5428 bit_size_t size_in_bits;
5429 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5430 return size_in_bits;
5431
5432 /* Give up if we can't get the size of the type. */
5433 return 0;
5434 }
5435 break;
5436 case SK_COMPOUND:
5437 {
5438 const compound_svalue *compound_sval
5439 = as_a <const compound_svalue *> (m_copied_sval);
5440 bit_size_t result = 0;
5441 /* Find keys for uninit svals. */
5442 for (auto iter : *compound_sval)
5443 {
5444 const svalue *sval = iter.second;
5445 if (const poisoned_svalue *psval
5446 = sval->dyn_cast_poisoned_svalue ())
5447 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5448 {
5449 const binding_key *key = iter.first;
5450 const concrete_binding *ckey
5451 = key->dyn_cast_concrete_binding ();
5452 gcc_assert (ckey)((void)(!(ckey) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5452, __FUNCTION__), 0 : 0))
;
5453 result += ckey->get_size_in_bits ();
5454 }
5455 }
5456 return result;
5457 }
5458 }
5459 }
5460
/* Emit a note at LOC stating how much of the copied value is
   uninitialized.  The count comes from calc_num_uninit_bits ();
   nothing is emitted when it is zero.  Whole multiples of
   BITS_PER_UNIT are reported in bytes, anything else in bits,
   with singular/plural wording handled explicitly.  */
5461 void inform_number_of_uninit_bits (location_t loc) const
5462 {
5463 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5464 if (num_uninit_bits <= 0)
5465 return;
5466 if (num_uninit_bits % BITS_PER_UNIT(8) == 0)
5467 {
5468 /* Express in bytes. */
5469 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT(8);
5470 if (num_uninit_bytes == 1)
5471 inform (loc, "1 byte is uninitialized");
5472 else
5473 inform (loc,
5474 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5475 }
5476 else
5477 {
5478 /* Express in bits. */
5479 if (num_uninit_bits == 1)
5480 inform (loc, "1 bit is uninitialized");
5481 else
5482 inform (loc,
5483 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5484 }
5485 }
5486
/* Emit notes at LOC describing each uninitialized range within the
   copied value.  Only compound svalues are examined: for each binding
   whose value is poisoned with POISON_KIND_UNINIT, the (asserted
   concrete) binding key is collected, the keys are sorted, and each
   range is reported via complain_about_uninit_range.  When the copied
   value has RECORD_TYPE, a record_layout is built so ranges can be
   attributed to fields/padding.  */
5487 void complain_about_uninit_ranges (location_t loc) const
5488 {
5489 if (const compound_svalue *compound_sval
5490 = m_copied_sval->dyn_cast_compound_svalue ())
5491 {
5492 /* Find keys for uninit svals. */
5493 auto_vec<const concrete_binding *> uninit_keys;
5494 for (auto iter : *compound_sval)
5495 {
5496 const svalue *sval = iter.second;
5497 if (const poisoned_svalue *psval
5498 = sval->dyn_cast_poisoned_svalue ())
5499 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5500 {
5501 const binding_key *key = iter.first;
5502 const concrete_binding *ckey
5503 = key->dyn_cast_concrete_binding ();
5504 gcc_assert (ckey)((void)(!(ckey) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5504, __FUNCTION__), 0 : 0))
;
5505 uninit_keys.safe_push (ckey);
5506 }
5507 }
5508 /* Complain about them in sorted order. */
5509 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr)qsort (concrete_binding::cmp_ptr_ptr);
5510
5511 std::unique_ptr<record_layout> layout;
5512
5513 tree type = m_copied_sval->get_type ();
5514 if (type && TREE_CODE (type)((enum tree_code) (type)->base.code) == RECORD_TYPE)
5515 {
5516 // (std::make_unique is C++14)
5517 layout = std::unique_ptr<record_layout> (new record_layout (type));
5518
5519 if (0)
5520 layout->dump ();
5521 }
5522
5523 unsigned i;
5524 const concrete_binding *ckey;
5525 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)for (i = 0; (uninit_keys).iterate ((i), &(ckey)); ++(i))
5526 {
5527 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5528 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5529 complain_about_uninit_range (loc, start_bit, next_bit,
5530 layout.get ());
5531 }
5532 }
5533 }
5534
/* Report the uninitialized bit range [START_BIT, NEXT_BIT) at LOC.
   If LAYOUT is non-NULL, walk the range item-by-item, reporting each
   field/padding item as fully or partially uninitialized and advancing
   START_BIT past it; any residue not covered by layout items (or the
   whole range when LAYOUT is NULL) is then reported numerically, in
   bytes when both endpoints are byte-aligned, otherwise in bits.  */
5535 void complain_about_uninit_range (location_t loc,
5536 bit_offset_t start_bit,
5537 bit_offset_t next_bit,
5538 const record_layout *layout) const
5539 {
5540 if (layout)
5541 {
5542 while (start_bit < next_bit)
5543 {
5544 if (const record_layout::item *item
5545 = layout->get_item_at (start_bit))
5546 {
5547 gcc_assert (start_bit >= item->get_start_bit_offset ())((void)(!(start_bit >= item->get_start_bit_offset ()) ?
fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5547, __FUNCTION__), 0 : 0))
;
5548 gcc_assert (start_bit < item->get_next_bit_offset ())((void)(!(start_bit < item->get_next_bit_offset ()) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5548, __FUNCTION__), 0 : 0))
;
5549 if (item->get_start_bit_offset () == start_bit
5550 && item->get_next_bit_offset () <= next_bit)
5551 complain_about_fully_uninit_item (*item);
5552 else
5553 complain_about_partially_uninit_item (*item);
5554 start_bit = item->get_next_bit_offset ();
5555 continue;
5556 }
5557 else
5558 break;
5559 }
5560 }
5561
5562 if (start_bit >= next_bit)
5563 return;
5564
5565 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5566 {
5567 /* Express in bytes. */
5568 byte_offset_t start_byte = start_bit / 8;
5569 byte_offset_t last_byte = (next_bit / 8) - 1;
5570 if (last_byte == start_byte)
5571 inform (loc,
5572 "byte %wu is uninitialized",
5573 start_byte.to_uhwi ());
5574 else
5575 inform (loc,
5576 "bytes %wu - %wu are uninitialized",
5577 start_byte.to_uhwi (),
5578 last_byte.to_uhwi ());
5579 }
5580 else
5581 {
5582 /* Express in bits. */
5583 bit_offset_t last_bit = next_bit - 1;
5584 if (last_bit == start_bit)
5585 inform (loc,
5586 "bit %wu is uninitialized",
5587 start_bit.to_uhwi ());
5588 else
5589 inform (loc,
5590 "bits %wu - %wu are uninitialized",
5591 start_bit.to_uhwi (),
5592 last_bit.to_uhwi ());
5593 }
5594 }
5595
/* Emit a note at ITEM's field declaration saying the item is entirely
   uninitialized.  Wording distinguishes padding-after-field from the
   field itself, uses bytes when the size is a whole number of bytes
   (bits otherwise), and special-cases singular "1 byte"/"1 bit".  */
5596 static void
5597 complain_about_fully_uninit_item (const record_layout::item &item)
5598 {
5599 tree field = item.m_field;
5600 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5601 if (item.m_is_padding)
5602 {
5603 if (num_bits % 8 == 0)
5604 {
5605 /* Express in bytes. */
5606 byte_size_t num_bytes = num_bits / BITS_PER_UNIT(8);
5607 if (num_bytes == 1)
5608 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5608, __FUNCTION__))->decl_minimal.locus)
,
5609 "padding after field %qD is uninitialized (1 byte)",
5610 field);
5611 else
5612 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5612, __FUNCTION__))->decl_minimal.locus)
,
5613 "padding after field %qD is uninitialized (%wu bytes)",
5614 field, num_bytes.to_uhwi ());
5615 }
5616 else
5617 {
5618 /* Express in bits. */
5619 if (num_bits == 1)
5620 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5620, __FUNCTION__))->decl_minimal.locus)
,
5621 "padding after field %qD is uninitialized (1 bit)",
5622 field);
5623 else
5624 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5624, __FUNCTION__))->decl_minimal.locus)
,
5625 "padding after field %qD is uninitialized (%wu bits)",
5626 field, num_bits.to_uhwi ());
5627 }
5628 }
5629 else
5630 {
5631 if (num_bits % 8 == 0)
5632 {
5633 /* Express in bytes. */
5634 byte_size_t num_bytes = num_bits / BITS_PER_UNIT(8);
5635 if (num_bytes == 1)
5636 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5636, __FUNCTION__))->decl_minimal.locus)
,
5637 "field %qD is uninitialized (1 byte)", field);
5638 else
5639 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5639, __FUNCTION__))->decl_minimal.locus)
,
5640 "field %qD is uninitialized (%wu bytes)",
5641 field, num_bytes.to_uhwi ());
5642 }
5643 else
5644 {
5645 /* Express in bits. */
5646 if (num_bits == 1)
5647 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5647, __FUNCTION__))->decl_minimal.locus)
,
5648 "field %qD is uninitialized (1 bit)", field);
5649 else
5650 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5650, __FUNCTION__))->decl_minimal.locus)
,
5651 "field %qD is uninitialized (%wu bits)",
5652 field, num_bits.to_uhwi ());
5653 }
5654 }
5655 }
5656
/* Emit a note at ITEM's field declaration saying the item (field, or
   the padding after it) is only partially uninitialized.  Unlike the
   "fully uninit" case, no size is reported.  */
5657 static void
5658 complain_about_partially_uninit_item (const record_layout::item &item)
5659 {
5660 tree field = item.m_field;
5661 if (item.m_is_padding)
5662 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5662, __FUNCTION__))->decl_minimal.locus)
,
5663 "padding after field %qD is partially uninitialized",
5664 field);
5665 else
5666 inform (DECL_SOURCE_LOCATION (field)((contains_struct_check ((field), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5666, __FUNCTION__))->decl_minimal.locus)
,
5667 "field %qD is partially uninitialized",
5668 field);
5669 /* TODO: ideally we'd describe what parts are uninitialized. */
5670 }
5671
/* If the source region maps back to a declaration, emit a fix-it hint
   at that declaration suggesting an " = {0}" initializer to force
   zero-initialization.  Does nothing when no decl is available.  */
5672 void maybe_emit_fixit_hint () const
5673 {
5674 if (tree decl = m_src_region->maybe_get_decl ())
5675 {
5676 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5676, __FUNCTION__))->decl_minimal.locus)
);
5677 hint_richloc.add_fixit_insert_after (" = {0}");
5678 inform (&hint_richloc,
5679 "suggest forcing zero-initialization by"
5680 " providing a %<{0}%> initializer");
5681 }
5682 }
5683
5684private:
5685 const region *m_src_region;
5686 const region *m_dest_region;
5687 const svalue *m_copied_sval;
5688};
5689
5690/* Return true if any part of SVAL is uninitialized. */
5691
/* Walk SVAL with a local visitor; flag is set as soon as any visited
   poisoned svalue has POISON_KIND_UNINIT.  */
5692 static bool
5693 contains_uninit_p (const svalue *sval)
5694 {
5695 struct uninit_finder : public visitor
5696 {
5697 public:
5698 uninit_finder () : m_found_uninit (false) {}
5699 void visit_poisoned_svalue (const poisoned_svalue *sval)
5700 {
5701 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5702 m_found_uninit = true;
5703 }
5704 bool m_found_uninit;
5705 };
5706
5707 uninit_finder v;
5708 sval->accept (&v);
5709
5710 return v.m_found_uninit;
5711}
5712
5713/* Function for use by plugins when simulating writing data through a
5714 pointer to an "untrusted" region DST_REG (and thus crossing a security
5715 boundary), such as copying data to user space in an OS kernel.
5716
5717 Check that COPIED_SVAL is fully initialized. If not, complain about
5718 an infoleak to CTXT.
5719
5720 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5721 as to where COPIED_SVAL came from. */
5722
/* NOTE(review): CTXT is dereferenced unconditionally below; callers
   must pass a non-null context — confirm at call sites.  */
5723 void
5724 region_model::maybe_complain_about_infoleak (const region *dst_reg,
5725 const svalue *copied_sval,
5726 const region *src_reg,
5727 region_model_context *ctxt)
5728 {
5729 /* Check for exposure. */
5730 if (contains_uninit_p (copied_sval))
5731 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5732 dst_reg,
5733 copied_sval));
5734 }
5735
5736/* Set errno to a positive symbolic int, as if some error has occurred. */
5737
/* Model an error having occurred: bind the errno region to a fresh
   conjured int svalue constrained to be > 0.  */
5738 void
5739 region_model::set_errno (const call_details &cd)
5740 {
5741 const region *errno_reg = m_mgr->get_errno_region ();
5742 conjured_purge p (this, cd.get_ctxt ());
5743 const svalue *new_errno_sval
5744 = m_mgr->get_or_create_conjured_svalue (integer_type_nodeinteger_types[itk_int],
5745 cd.get_call_stmt (),
5746 errno_reg, p);
5747 const svalue *zero
5748 = m_mgr->get_or_create_int_cst (integer_type_nodeinteger_types[itk_int], 0);
5749 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5750 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
5751 }
5752
5753/* class noop_region_model_context : public region_model_context. */
5754
/* Deliberate no-op: this context discards pending notes.  */
5755 void
5756 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
5757 {
5758 }
5759
/* Deliberate no-op: this context ignores bifurcation requests.  */
5760 void
5761 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
5762 {
5763 }
5764
/* Deliberate no-op: this context does not terminate paths.  */
5765 void
5766 noop_region_model_context::terminate_path ()
5767 {
5768 }
5769
5770/* struct model_merger. */
5771
5772/* Dump a multiline representation of this merger to PP. */
5773
/* Print models A, B and the merged model in sequence, each under a
   heading; SIMPLE selects the simplified dump form.  */
5774 void
5775 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
5776 {
5777 pp_string (pp, "model A:");
5778 pp_newline (pp);
5779 m_model_a->dump_to_pp (pp, simple, true);
5780 pp_newline (pp);
5781
5782 pp_string (pp, "model B:");
5783 pp_newline (pp);
5784 m_model_b->dump_to_pp (pp, simple, true);
5785 pp_newline (pp);
5786
5787 pp_string (pp, "merged model:");
5788 pp_newline (pp);
5789 m_merged_model->dump_to_pp (pp, simple, true);
5790 pp_newline (pp);
5791 }
5792
5793/* Dump a multiline representation of this merger to FILE. */
5794
/* Set up a pretty_printer targeting FP (tree-aware, inheriting the
   global colorization setting), dump, and flush.  */
5795 void
5796 model_merger::dump (FILE *fp, bool simple) const
5797 {
5798 pretty_printer pp;
5799 pp_format_decoder (&pp)(&pp)->format_decoder = default_tree_printer;
5800 pp_show_color (&pp)(&pp)->show_color = pp_show_color (global_dc->printer)(global_dc->printer)->show_color;
5801 pp.buffer->stream = fp;
5802 dump_to_pp (&pp, simple);
5803 pp_flush (&pp);
5804 }
5805
5806/* Dump a multiline representation of this merger to stderr. */
5807
/* Convenience overload: dump to stderr.  */
5808DEBUG_FUNCTION__attribute__ ((__used__)) void
5809 model_merger::dump (bool simple) const
5810 {
5811 dump (stderrstderr, simple);
5812 }
5813
5814/* Return true if it's OK to merge SVAL with other svalues. */
5815
/* Return true iff SVAL may be merged with other svalues: with an
   extrinsic state available, SVAL is rejected when either input
   program state holds non-purgable sm-state for it.  */
5816 bool
5817 model_merger::mergeable_svalue_p (const svalue *sval) const
5818 {
5819 if (m_ext_state)
5820 {
5821 /* Reject merging svalues that have non-purgable sm-state,
5822 to avoid falsely reporting memory leaks by merging them
5823 with something else. For example, given a local var "p",
5824 reject the merger of a:
5825 store_a mapping "p" to a malloc-ed ptr
5826 with:
5827 store_b mapping "p" to a NULL ptr. */
5828 if (m_state_a)
5829 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5830 return false;
5831 if (m_state_b)
5832 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5833 return false;
5834 }
5835 return true;
5836 }
5837
5838} // namespace ana
5839
5840/* Dump RMODEL fully to stderr (i.e. without summarization). */
5841
/* Debugger entry point: full (non-summarized) dump of RMODEL.  */
5842DEBUG_FUNCTION__attribute__ ((__used__)) void
5843 debug (const region_model &rmodel)
5844 {
5845 rmodel.dump (false);
5846 }
5847
5848/* class rejected_op_constraint : public rejected_constraint. */
5849
/* Print the rejected constraint as "LHS <op> RHS", evaluating both
   operands in a copy of the model.  */
5850 void
5851 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5852 {
5853 region_model m (m_model);
5854 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULLnullptr);
5855 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULLnullptr);
5856 lhs_sval->dump_to_pp (pp, true);
5857 pp_printf (pp, " %s ", op_symbol_code (m_op));
5858 rhs_sval->dump_to_pp (pp, true);
5859 }
5860
5861/* class rejected_default_case : public rejected_constraint. */
5862
/* Fixed wording for the implicit default branch of an enum switch.  */
5863 void
5864 rejected_default_case::dump_to_pp (pretty_printer *pp) const
5865 {
5866 pp_string (pp, "implicit default for enum");
5867 }
5868
5869/* class rejected_ranges_constraint : public rejected_constraint. */
5870
/* Print the rejected constraint as "SVAL in RANGES", evaluating the
   expression in a copy of the model.  */
5871 void
5872 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5873 {
5874 region_model m (m_model);
5875 const svalue *sval = m.get_rvalue (m_expr, NULLnullptr);
5876 sval->dump_to_pp (pp, true);
5877 pp_string (pp, " in ");
5878 m_ranges->dump_to_pp (pp, true);
5879 }
5880
5881/* class engine. */
5882
5883/* engine's ctor. */
5884
/* engine's ctor: record the supergraph and hand LOGGER to the manager.  */
5885engine::engine (const supergraph *sg, logger *logger)
5886: m_sg (sg), m_mgr (logger)
5887{
5888}
5889
5890/* Dump the managed objects by class to LOGGER, and the per-class totals. */
5891
/* Forward to the manager's stats logging (with per-class totals).  */
5892 void
5893 engine::log_stats (logger *logger) const
5894 {
5895 m_mgr.log_stats (logger, true);
5896 }
5897
5898namespace ana {
5899
5900#if CHECKING_P1
5901
5902namespace selftest {
5903
5904/* Build a constant tree of the given type from STR. */
5905
/* Parse STR into a real_value and wrap it as a REAL_CST of TYPE;
   used by selftests for constants like "QNaN" and "Inf".  */
5906 static tree
5907 build_real_cst_from_string (tree type, const char *str)
5908 {
5909 REAL_VALUE_TYPEstruct real_value real;
5910 real_from_string (&real, str);
5911 return build_real (type, real);
5912 }
5913
5914/* Append various "interesting" constants to OUT (e.g. NaN). */
5915
/* Push edge-case constants onto OUT: signed/unsigned ints 0 and 42,
   plus float NaNs (quiet and signaling, both signs), signed zeros,
   and infinities.  */
5917 static void
5918 append_interesting_constants (auto_vec<tree> *out)
5919 {
5920 out->safe_push (build_int_cst (integer_type_nodeinteger_types[itk_int], 0));
5921 out->safe_push (build_int_cst (integer_type_nodeinteger_types[itk_int], 42));
5922 out->safe_push (build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0));
5923 out->safe_push (build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 42));
5924 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "QNaN"));
5925 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "-QNaN"));
5926 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "SNaN"));
5927 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "-SNaN"));
5928 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "0.0"));
5929 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "-0.0"));
5930 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "Inf"));
5931 out->safe_push (build_real_cst_from_string (float_type_nodeglobal_trees[TI_FLOAT_TYPE], "-Inf"));
5932 }
5932
5933/* Verify that tree_cmp is a well-behaved comparator for qsort, even
5934 if the underlying constants aren't comparable. */
5935
/* Exercise tree_cmp by qsort-ing every ordered triple drawn from the
   "interesting" constants (including NaNs, which are not mutually
   comparable); the point is that qsort must not misbehave.  */
5937 static void
5938 test_tree_cmp_on_constants ()
5939 {
5940 auto_vec<tree> csts;
5941 append_interesting_constants (&csts);
5942
5943 /* Try sorting every triple. */
5944 const unsigned num = csts.length ();
5945 for (unsigned i = 0; i < num; i++)
5946 for (unsigned j = 0; j < num; j++)
5947 for (unsigned k = 0; k < num; k++)
5948 {
5949 auto_vec<tree> v (3);
5950 v.quick_push (csts[i]);
5951 v.quick_push (csts[j]);
5952 v.quick_push (csts[k]);
5953 v.qsort (tree_cmp)qsort (tree_cmp);
5954 }
5955 }
5955
5956/* Implementation detail of the ASSERT_CONDITION_* macros. */
5957
/* svalue overload: evaluate LHS OP RHS in MODEL and assert the
   resulting tristate equals EXPECTED (pass/fail recorded at LOC).  */
5958 void
5959 assert_condition (const location &loc,
5960 region_model &model,
5961 const svalue *lhs, tree_code op, const svalue *rhs,
5962 tristate expected)
5963 {
5964 tristate actual = model.eval_condition (lhs, op, rhs);
5965 ASSERT_EQ_AT (loc, actual, expected)do { const char *desc_ = "ASSERT_EQ (" "actual" ", " "expected"
")"; if ((actual) == (expected)) ::selftest::pass ((loc), desc_
); else ::selftest::fail ((loc), desc_); } while (0)
;
5966 }
5967
5968/* Implementation detail of the ASSERT_CONDITION_* macros. */
5969
/* tree overload: as above, but operands are trees evaluated with a
   null context.  */
5970 void
5971 assert_condition (const location &loc,
5972 region_model &model,
5973 tree lhs, tree_code op, tree rhs,
5974 tristate expected)
5975 {
5976 tristate actual = model.eval_condition (lhs, op, rhs, NULLnullptr);
5977 ASSERT_EQ_AT (loc, actual, expected)do { const char *desc_ = "ASSERT_EQ (" "actual" ", " "expected"
")"; if ((actual) == (expected)) ::selftest::pass ((loc), desc_
); else ::selftest::fail ((loc), desc_); } while (0)
;
5978 }
5979
5980/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5981
/* Render T via dump_tree into a tree-aware pretty_printer and assert
   the text equals EXPECTED (quote style normalized by the sentinel).  */
5982 static void
5983 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5984 {
5985 auto_fix_quotes sentinel;
5986 pretty_printer pp;
5987 pp_format_decoder (&pp)(&pp)->format_decoder = default_tree_printer;
5988 dump_tree (&pp, t);
5989 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected)do { ::selftest::assert_streq ((loc), "pp_formatted_text (&pp)"
, "expected", (pp_formatted_text (&pp)), (expected)); } while
(0)
;
5990 }
5991
5992/* Assert that dump_tree (T) is EXPECTED. */
5993
5994#define ASSERT_DUMP_TREE_EQ(T, EXPECTED)do { assert_dump_tree_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5994, __FUNCTION__))), (T), (EXPECTED)); } while (0)
\
5995 SELFTEST_BEGIN_STMTdo { \
5996 assert_dump_tree_eq ((SELFTEST_LOCATION(::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 5996, __FUNCTION__))
), (T), (EXPECTED)); \
5997 SELFTEST_END_STMT} while (0)
5998
5999/* Implementation detail of ASSERT_DUMP_EQ. */
6000
/* Render MODEL.dump_to_pp (with SUMMARIZE) and assert the text equals
   EXPECTED.  */
6001 static void
6002 assert_dump_eq (const location &loc,
6003 const region_model &model,
6004 bool summarize,
6005 const char *expected)
6006 {
6007 auto_fix_quotes sentinel;
6008 pretty_printer pp;
6009 pp_format_decoder (&pp)(&pp)->format_decoder = default_tree_printer;
6010
6011 model.dump_to_pp (&pp, summarize, true);
6012 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected)do { ::selftest::assert_streq ((loc), "pp_formatted_text (&pp)"
, "expected", (pp_formatted_text (&pp)), (expected)); } while
(0)
;
6013 }
6014
6015/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6016
6017#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED)do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6017, __FUNCTION__))), (MODEL), (SUMMARIZE), (EXPECTED)); }
while (0)
\
6018 SELFTEST_BEGIN_STMTdo { \
6019 assert_dump_eq ((SELFTEST_LOCATION(::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6019, __FUNCTION__))
), (MODEL), (SUMMARIZE), (EXPECTED)); \
6020 SELFTEST_END_STMT} while (0)
6021
6022/* Smoketest for region_model::dump_to_pp. */
6023
/* Smoke-test: a freshly-constructed region_model must produce the same
   fixed dump text in both summarized and non-summarized form.  (The
   repeated text below is the analyzer rendering's expansion of the
   ASSERT_DUMP_EQ macro.)  */
6024 static void
6025 test_dump ()
6026 {
6027 region_model_manager mgr;
6028 region_model model (&mgr);
6029
6030 ASSERT_DUMP_EQ (model, false,do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6035, __FUNCTION__))), (model), (false), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6031 "stack depth: 0\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6035, __FUNCTION__))), (model), (false), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6032 "m_called_unknown_fn: FALSE\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6035, __FUNCTION__))), (model), (false), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6033 "constraint_manager:\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6035, __FUNCTION__))), (model), (false), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6034 " equiv classes:\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6035, __FUNCTION__))), (model), (false), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6035 " constraints:\n")do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6035, __FUNCTION__))), (model), (false), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
;
6036 ASSERT_DUMP_EQ (model, true,do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6041, __FUNCTION__))), (model), (true), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6037 "stack depth: 0\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6041, __FUNCTION__))), (model), (true), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6038 "m_called_unknown_fn: FALSE\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6041, __FUNCTION__))), (model), (true), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6039 "constraint_manager:\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6041, __FUNCTION__))), (model), (true), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6040 " equiv classes:\n"do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6041, __FUNCTION__))), (model), (true), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
6041 " constraints:\n")do { assert_dump_eq (((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6041, __FUNCTION__))), (model), (true), ("stack depth: 0\n"
"m_called_unknown_fn: FALSE\n" "constraint_manager:\n" " equiv classes:\n"
" constraints:\n")); } while (0)
;
6042 }
6043
6044/* Helper function for selftests. Create a struct or union type named NAME,
6045 with the fields given by the FIELD_DECLS in FIELDS.
6046 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
6047 create a UNION_TYPE. */
6048
6049static tree
6050make_test_compound_type (const char *name, bool is_struct,
6051 const auto_vec<tree> *fields)
6052{
6053 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6054 TYPE_NAME (t)((tree_class_check ((t), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/analyzer/region-model.cc"
, 6054, __FUNCTION__))->type_common.name)
= get_identifier (name)(__builtin_constant_p (name) ? get_identifier_with_length ((name
), strlen (name)) : get_identifier (name))
;
6055 TYPE_SIZE (t)((tree_class_che