40using namespace SVFUtil;
148 assert(
false &&
"No entry functions found for analysis");
233 else if (SVFUtil::isa<CallCFGEdge>(
edge))
238 else if (SVFUtil::isa<RetCFGEdge>(
edge))
283 if (
var->getInEdges().empty())
290 const SVFVar* src = cs->getRHSVar();
292 return SVFUtil::dyn_cast<LoadStmt>(*src->
getInEdges().begin());
346 predicate =
it->second;
379 predicate =
it->second;
433 *
edge->getCondition()->getInEdges().begin());
457 for (
int i = 0;
i < 2;
i++)
459 if (
opVal[
i].getInterval().is_numeral() || !
opVal[1-
i].getInterval().is_numeral())
465 predicate,
succ,
i == 0,
476 if (
as.inAddrToValTable(
objId))
493 if (!
as.inVarToValTable(
var->getId()) && !
as.inVarToAddrsTable(
var->getId()))
506 if (
const LoadStmt* load = SVFUtil::dyn_cast<LoadStmt>(
stmt))
514 if (
as.inAddrToValTable(
objId))
527 assert(!
cmpVar->getInEdges().empty() &&
"branch condition has no defining edge?");
528 if (SVFUtil::isa<CmpStmt>(*
cmpVar->getInEdges().begin()))
542 bool isFunEntry = SVFUtil::isa<FunEntryICFGNode>(node);
607 while (!worklist.
empty())
641 assert (
false &&
"it is not call node");
671 if (
retNode->getSVFStmts().size() > 0)
673 if (
const RetPE *retPE = SVFUtil::dyn_cast<RetPE>(*
retNode->getSVFStmts().begin()))
675 if (!retPE->getLHSVar()->isPointer() &&
676 !retPE->getLHSVar()->isConstDataOrAggDataButNotNullPtr())
682 if (
retNode->getOutEdges().size() > 1)
689 for (
const ICFGNode* node : bb->getICFGNodeList())
693 if (
const StoreStmt* store = SVFUtil::dyn_cast<StoreStmt>(
stmt))
696 if (!
rhsVar->isPointer() && !
rhsVar->isConstDataOrAggDataButNotNullPtr())
741 if (!
Addrs.isAddr() ||
Addrs.getAddrs().empty())
746 return SVFUtil::dyn_cast<FunObjVar>(
func_var);
779 assert(
false &&
"TOP mode should not reach narrowing phase for recursive functions");
786 assert(
false &&
"Unknown recursion handling mode");
821 if (
callee->isDeclaration())
932 for (
const auto& [
id,
val] :
next.getVarToVal())
953 for (
const auto& [
id,
val] :
next.getVarToVal())
1042 else if (SVFUtil::isa<UnaryOPStmt>(
stmt))
1045 else if (SVFUtil::isa<BranchStmt>(
stmt))
1049 else if (
const LoadStmt *load = SVFUtil::dyn_cast<LoadStmt>(
stmt))
1053 else if (
const StoreStmt *store = SVFUtil::dyn_cast<StoreStmt>(
stmt))
1073 else if (
const CallPE *callPE = SVFUtil::dyn_cast<CallPE>(
stmt))
1078 else if (
const RetPE *retPE = SVFUtil::dyn_cast<RetPE>(
stmt))
1083 assert(
false &&
"implement this part");
1091 (!
it->second.isInterval() && !
it->second.isAddr()));
1110 if (
condVal.getInterval().is_numeral())
1209 switch (
binary->getOpcode())
1252 assert(
false &&
"undefined binary: ");
1301 auto predicate =
cmp->getPredicate();
1353 assert(
false &&
"undefined compare: ");
1361 auto predicate =
cmp->getPredicate();
1368 if (
lhs.hasIntersect(
rhs))
1372 else if (
lhs.empty() &&
rhs.empty())
1386 if (
lhs.hasIntersect(
rhs))
1390 else if (
lhs.empty() &&
rhs.empty())
1405 if (
lhs.size() == 1 &&
rhs.size() == 1)
1420 if (
lhs.size() == 1 &&
rhs.size() == 1)
1435 if (
lhs.size() == 1 &&
rhs.size() == 1)
1450 if (
lhs.size() == 1 &&
rhs.size() == 1)
1474 assert(
false &&
"undefined compare: ");
1502 auto getZExtValue = [&](
const SVFVar*
var)
1505 if (SVFUtil::isa<SVFIntegerType>(
type))
1509 if (
val.getInterval().is_numeral())
1517 else if (
bits == 16)
1523 else if (
bits == 32)
1529 else if (
bits == 64)
1535 assert(
false &&
"cannot support int type other than u8/16/32/64");
1548 if(
itv.isBottom())
return itv;
1578 assert(
false &&
"cannot support dst int type other than u8/16/32");
1635 assert(
false &&
"undefined copy kind");
static const LoadStmt * findBackingLoad(const SVFVar *var)
static IntervalValue computeCmpConstraint(s32_t predicate, s64_t succ, bool isLHS, const IntervalValue &self, const IntervalValue &other)
void performStat() override
u32_t & getICFGNodeTrace()
Handles external API calls and manages abstract states.
void handleExtAPI(const CallICFGNode *call)
Handles an external API call.
IntervalValue getRangeLimitFromType(const SVFType *type)
Gets the range limit from a type.
void updateStateOnCall(const CallPE *callPE)
const FunObjVar * getCallee(const CallICFGNode *callNode)
Get callee function: directly for direct calls, via pointer analysis for indirect calls.
bool isBranchFeasible(const IntraCFGEdge *edge, AbstractState &as)
Returns true if the branch is reachable; narrows as in-place.
void updateStateOnStore(const StoreStmt *store)
bool hasAbsState(const ICFGNode *node)
bool narrowCycleState(const AbstractState &prev, const AbstractState &cur, const ICFGCycleWTO *cycle)
Narrow prev with cur; write the narrowed state back and scatter.
virtual void handleFunCall(const CallICFGNode *callNode)
void updateAbsValue(const SVFVar *var, const AbstractValue &val, const ICFGNode *node)
void analyzeFromAllProgEntries()
Analyze all entry points (functions without callers)
void updateStateOnGep(const GepStmt *gep)
void analyse()
Program entry.
virtual void handleGlobalNode()
Initialize abstract state for the global ICFG node and process global statements.
AbstractStateManager * svfStateMgr
void handleFunction(const ICFGNode *funEntry, const CallICFGNode *caller=nullptr)
Handle a function body via worklist-driven WTO traversal starting from funEntry.
virtual bool isExtCall(const CallICFGNode *callNode)
bool isSwitchBranchFeasible(const IntraCFGEdge *edge, AbstractState &as)
Returns true if the switch branch is feasible; narrows as in-place.
AbstractInterpretation()
Constructor.
AbstractState getFullCycleHeadState(const ICFGCycleWTO *cycle)
void updateStateOnPhi(const PhiStmt *phi)
bool handleICFGNode(const ICFGNode *node)
Handle an ICFG node: execute statements; return true if state changed.
std::vector< std::unique_ptr< AEDetector > > detectors
virtual void handleExtCall(const CallICFGNode *callNode)
virtual void skipRecursionWithTop(const CallICFGNode *callNode)
bool widenCycleState(const AbstractState &prev, const AbstractState &cur, const ICFGCycleWTO *cycle)
PreAnalysis * preAnalysis
const AbstractValue & getAbsValue(const SVFVar *var, const ICFGNode *node)
bool mergeStatesFromPredecessors(const ICFGNode *node)
void updateStateOnSelect(const SelectStmt *select)
virtual void handleSVFStatement(const SVFStmt *stmt)
Dispatch an SVF statement (Addr/Binary/Cmp/Load/Store/Copy/Gep/Select/Phi/Call/Ret) to its handler.
bool skipRecursiveCall(const CallICFGNode *callNode)
Skip recursive callsites (within SCC); entry calls from outside SCC are not skipped.
SVFIR * svfir
protected data members, also used in subclasses
virtual void runOnModule(ICFG *icfg)
virtual bool isRecursiveCallSite(const CallICFGNode *callNode, const FunObjVar *)
Check if caller and callee are in the same CallGraph SCC (i.e. a recursive callsite)
bool shouldApplyNarrowing(const FunObjVar *fun)
Check if narrowing should be applied: always for regular loops, mode-dependent for recursion.
virtual bool isRecursiveFun(const FunObjVar *fun)
Check if a function is recursive (part of a call graph SCC)
void updateStateOnAddr(const AddrStmt *addr)
virtual ~AbstractInterpretation()
Destructor.
bool hasAbsValue(const SVFVar *var, const ICFGNode *node)
virtual void handleCallSite(const ICFGNode *node)
Handle a call site node: dispatch to ext-call, direct-call, or indirect-call handling.
void updateStateOnRet(const RetPE *retPE)
void updateStateOnCopy(const CopyStmt *copy)
const AbstractState & getAbsState(const ICFGNode *node) const
void propagateObjVarAbsVal(const ObjVar *var, const ICFGNode *defSite)
Propagate an ObjVar's abstract value from defSite to all its use-sites.
std::deque< const FunObjVar * > collectProgEntryFuns()
Get all entry point functions (functions without callers)
void updateStateOnLoad(const LoadStmt *load)
void updateStateOnBinary(const BinaryOPStmt *binary)
bool isCmpBranchFeasible(const IntraCFGEdge *edge, AbstractState &as)
Returns true if the cmp-conditional branch is feasible; narrows as in-place.
Set< const ICFGNode * > allAnalyzedNodes
void updateAbsState(const ICFGNode *node, const AbstractState &state)
virtual void handleLoopOrRecursion(const ICFGCycleWTO *cycle, const CallICFGNode *caller=nullptr)
Handle a WTO cycle (loop or recursive function) using widening/narrowing iteration.
const SVFVar * getSVFVar(NodeID varId) const
Retrieve SVFVar given its ID; asserts if no such variable exists.
void updateStateOnCmp(const CmpStmt *cmp)
void storeValue(const ValVar *pointer, const AbstractValue &val, const ICFGNode *node)
Map< const ICFGNode *, AbstractState > & getTrace()
AbstractState & getAbstractState(const ICFGNode *node)
Retrieve the abstract state for a given ICFG node. Asserts if absent.
IntervalValue getGepElementIndex(const GepStmt *gep)
Compute the flattened element index for a GepStmt.
Set< const ICFGNode * > getUseSitesOfObjVar(const ObjVar *obj, const ICFGNode *node) const
Given an ObjVar and its use-site ICFGNode, find all downstream use-site ICFGNodes.
AddressValue getGepObjAddrs(const ValVar *pointer, IntervalValue offset)
Compute GEP object addresses for a pointer at a given element offset.
AbstractValue loadValue(const ValVar *pointer, const ICFGNode *node)
const VarToAbsValMap & getVarToVal() const
get var2val map
void store(u32_t addr, const AbstractValue &val)
void initObjVar(const ObjVar *objVar)
AbstractState narrowing(const AbstractState &other)
domain narrow with other, and return the narrowed domain
AbstractState widening(const AbstractState &other)
domain widen with other, and return the widened domain
IntervalValue & getInterval()
AddressValue & getAddrs()
s64_t getIntNumeral() const
const FunObjVar * getFunction() const
Get function of this call node.
bool hasIndCSCallees(const CallICFGNode *cs) const
const FunctionSet & getIndCSCallees(const CallICFGNode *cs) const
const CallICFGNode * getOpCallICFGNode(u32_t op_idx) const
Return the CallICFGNode of the i-th operand.
@ ICMP_SGT
signed greater than
@ FCMP_UEQ
1 0 0 1 True if unordered or equal
@ FCMP_ONE
0 1 1 0 True if ordered and operands are unequal
@ ICMP_UGE
unsigned greater or equal
@ FCMP_UGT
1 0 1 0 True if unordered or greater than
@ ICMP_ULE
unsigned less or equal
@ FCMP_OGE
0 0 1 1 True if ordered and greater than or equal
@ FCMP_OLT
0 1 0 0 True if ordered and less than
@ FCMP_OGT
0 0 1 0 True if ordered and greater than
@ FCMP_TRUE
1 1 1 1 Always true (always folded)
@ ICMP_ULT
unsigned less than
@ FCMP_ULE
1 1 0 1 True if unordered, less than, or equal
@ ICMP_SLT
signed less than
@ ICMP_UGT
unsigned greater than
@ FCMP_OEQ
0 0 0 1 True if ordered and equal
@ FCMP_ORD
0 1 1 1 True if ordered (no nans)
@ FCMP_OLE
0 1 0 1 True if ordered and less than or equal
@ FCMP_FALSE
0 0 0 0 Always false (always folded)
@ FCMP_ULT
1 1 0 0 True if unordered or less than
@ FCMP_UNO
1 0 0 0 True if unordered: isnan(X) | isnan(Y)
@ FCMP_UGE
1 0 1 1 True if unordered, greater than, or equal
@ ICMP_SGE
signed greater or equal
@ FCMP_UNE
1 1 1 0 True if unordered or not equal
@ ICMP_SLE
signed less or equal
bool isDeclaration() const
iterator begin()
Iterators.
const GEdgeSetTy & getInEdges() const
const SVFStmtList & getSVFStmts() const
ICFGEdge * getICFGEdge(const ICFGNode *src, const ICFGNode *dst, ICFGEdge::ICFGEdgeK kind)
Get an ICFG edge according to src and dst.
void updateCallGraph(CallGraph *callgraph)
update ICFG for indirect calls
FunEntryICFGNode * getFunEntryICFGNode(const FunObjVar *fun)
Add a function entry node.
GlobalICFGNode * getGlobalICFGNode() const
void meet_with(const IntervalValue &other)
Intersect this IntervalValue with other in place (meet operation).
static BoundedInt minus_infinity()
Get minus infinity -inf.
bool is_zero() const
Return true if the IntervalValue is [0, 0].
static BoundedInt plus_infinity()
Get plus infinity +inf.
static IntervalValue top()
Create the IntervalValue [-inf, +inf].
const BoundedInt & lb() const
Return the lower bound.
const ValVar * getLHSVar() const
const ValVar * getRHSVar() const
const ValVar * getRes() const
Result SVFVar.
const ValVar * getOpVar(u32_t pos) const
Operand SVFVars.
u32_t getOpVarNum() const
static const OptionMap< u32_t > HandleRecur
recursion handling mode, Default: TOP
static const OptionMap< u32_t > AESparsity
static const Option< u32_t > WidenDelay
static const Option< bool > PStat
bool inSameCallGraphSCC(const FunObjVar *fun1, const FunObjVar *fun2)
Return TRUE if this edge is inside a PTACallGraph SCC, i.e., src node and dst node are in the same SCC.
bool isInRecursion(const FunObjVar *fun) const
const Map< const FunObjVar *, const ICFGWTO * > & getFuncToWTO() const
Accessors for WTO data.
const Set< const ValVar * > getCycleValVars(const ICFGCycleWTO *cycle) const
AndersenWaveDiff * getPointerAnalysis() const
Accessors for Andersen's results.
void initWTO()
Build WTO for each function using call graph SCC.
CallGraph * getCallGraph() const
const ValVar * getRHSVar() const
const ValVar * getLHSVar() const
const CallSiteToFunPtrMap & getIndirectCallsites() const
Add/get indirect callsites.
const SVFVar * getSVFVar(NodeID id) const
ObjVar/GepObjVar/BaseObjVar.
static SVFIR * getPAG(bool buildFromFile=false)
Singleton design here to make sure we only have one instance during any analysis.
ICFGNode * getICFGNode() const
u32_t getByteSize() const
virtual const std::string & getName() const
const ValVar * getRHSVar() const
const ValVar * getLHSVar() const
bool isExtCall(const FunObjVar *fun)
WTONode< ICFG > ICFGSingletonWTO
llvm::IRBuilder IRBuilder
WTOComponent< ICFG > ICFGWTOComp