2009-06-02 17:52:33 +00:00
|
|
|
//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
|
|
|
|
//
|
|
|
|
// The LLVM Compiler Infrastructure
|
|
|
|
//
|
|
|
|
// This file is distributed under the University of Illinois Open Source
|
|
|
|
// License. See LICENSE.TXT for details.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// This file contains routines that help determine which pointers are captured.
|
|
|
|
// A pointer value is captured if the function makes a copy of any part of the
|
|
|
|
// pointer that outlives the call. Not being captured means, more or less, that
|
|
|
|
// the pointer is only dereferenced and not stored in a global. Returning part
|
|
|
|
// of the pointer as the function return value may or may not count as capturing
|
|
|
|
// the pointer, depending on the context.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
|
|
|
#include "llvm/ADT/SmallSet.h"
|
|
|
|
#include "llvm/ADT/SmallVector.h"
|
2013-04-08 18:41:23 +00:00
|
|
|
#include "llvm/Analysis/AliasAnalysis.h"
|
2014-11-24 09:08:18 +00:00
|
|
|
#include "llvm/Analysis/CFG.h"
|
2015-01-18 16:17:27 +00:00
|
|
|
#include "llvm/Analysis/CaptureTracking.h"
|
2015-12-30 11:46:15 +00:00
|
|
|
#include "llvm/Analysis/OrderedBasicBlock.h"
|
2014-11-24 09:08:18 +00:00
|
|
|
#include "llvm/IR/CallSite.h"
|
2013-04-08 18:41:23 +00:00
|
|
|
#include "llvm/IR/Constants.h"
|
2014-11-24 09:08:18 +00:00
|
|
|
#include "llvm/IR/Dominators.h"
|
2013-04-08 18:41:23 +00:00
|
|
|
#include "llvm/IR/Instructions.h"
|
2016-07-23 20:41:05 +00:00
|
|
|
#include "llvm/IR/IntrinsicInst.h"
|
2013-04-08 18:41:23 +00:00
|
|
|
|
2009-06-02 17:52:33 +00:00
|
|
|
using namespace llvm;
|
|
|
|
|
2012-04-14 13:54:10 +00:00
|
|
|
// Out-of-line destructor definition (kept in this file so the class's
// key function lives in exactly one translation unit).
CaptureTracker::~CaptureTracker() = default;
|
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
// Base-class default: every use is worth exploring. Subclasses override
// this to prune parts of the use graph.
bool CaptureTracker::shouldExplore(const Use *U) {
  return true;
}
|
2012-12-02 13:10:19 +00:00
|
|
|
|
2012-04-14 13:54:10 +00:00
|
|
|
namespace {
|
|
|
|
struct SimpleCaptureTracker : public CaptureTracker {
|
|
|
|
explicit SimpleCaptureTracker(bool ReturnCaptures)
|
|
|
|
: ReturnCaptures(ReturnCaptures), Captured(false) {}
|
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
void tooManyUses() override { Captured = true; }
|
2012-04-14 13:54:10 +00:00
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
bool captured(const Use *U) override {
|
2012-04-14 13:54:10 +00:00
|
|
|
if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
|
2012-08-15 19:34:23 +00:00
|
|
|
return false;
|
2012-04-14 13:54:10 +00:00
|
|
|
|
|
|
|
Captured = true;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool ReturnCaptures;
|
|
|
|
|
|
|
|
bool Captured;
|
|
|
|
};
|
2014-11-24 09:08:18 +00:00
|
|
|
|
|
|
|
/// Only find pointer captures which happen before the given instruction. Uses
|
|
|
|
/// the dominator tree to determine whether one instruction is before another.
|
|
|
|
/// Only support the case where the Value is defined in the same basic block
|
|
|
|
/// as the given instruction and the use.
|
|
|
|
struct CapturesBefore : public CaptureTracker {
|
2015-07-05 14:21:36 +00:00
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
CapturesBefore(bool ReturnCaptures, const Instruction *I, DominatorTree *DT,
|
2015-12-30 11:46:15 +00:00
|
|
|
bool IncludeI, OrderedBasicBlock *IC)
|
|
|
|
: OrderedBB(IC), BeforeHere(I), DT(DT),
|
2015-07-05 14:21:36 +00:00
|
|
|
ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}
|
2014-11-24 09:08:18 +00:00
|
|
|
|
|
|
|
void tooManyUses() override { Captured = true; }
|
|
|
|
|
2015-07-05 14:21:36 +00:00
|
|
|
bool isSafeToPrune(Instruction *I) {
|
2014-11-24 09:08:18 +00:00
|
|
|
BasicBlock *BB = I->getParent();
|
|
|
|
// We explore this usage only if the usage can reach "BeforeHere".
|
|
|
|
// If use is not reachable from entry, there is no need to explore.
|
|
|
|
if (BeforeHere != I && !DT->isReachableFromEntry(BB))
|
2015-07-05 14:21:36 +00:00
|
|
|
return true;
|
|
|
|
|
|
|
|
// Compute the case where both instructions are inside the same basic
|
|
|
|
// block. Since instructions in the same BB as BeforeHere are numbered in
|
2015-12-30 11:46:15 +00:00
|
|
|
// 'OrderedBB', avoid using 'dominates' and 'isPotentiallyReachable'
|
2015-07-05 14:21:36 +00:00
|
|
|
// which are very expensive for large basic blocks.
|
|
|
|
if (BB == BeforeHere->getParent()) {
|
|
|
|
// 'I' dominates 'BeforeHere' => not safe to prune.
|
|
|
|
//
|
2015-12-30 11:46:15 +00:00
|
|
|
// The value defined by an invoke dominates an instruction only
|
|
|
|
// if it dominates every instruction in UseBB. A PHI is dominated only
|
|
|
|
// if the instruction dominates every possible use in the UseBB. Since
|
2015-07-05 14:21:36 +00:00
|
|
|
// UseBB == BB, avoid pruning.
|
|
|
|
if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
|
|
|
|
return false;
|
2015-12-30 11:46:15 +00:00
|
|
|
if (!OrderedBB->dominates(BeforeHere, I))
|
2015-07-05 14:21:36 +00:00
|
|
|
return false;
|
|
|
|
|
|
|
|
// 'BeforeHere' comes before 'I', it's safe to prune if we also
|
|
|
|
// guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
|
|
|
|
// by its successors, i.e, prune if:
|
|
|
|
//
|
|
|
|
// (1) BB is an entry block or have no sucessors.
|
|
|
|
// (2) There's no path coming back through BB sucessors.
|
|
|
|
if (BB == &BB->getParent()->getEntryBlock() ||
|
|
|
|
!BB->getTerminator()->getNumSuccessors())
|
|
|
|
return true;
|
|
|
|
|
|
|
|
SmallVector<BasicBlock*, 32> Worklist;
|
|
|
|
Worklist.append(succ_begin(BB), succ_end(BB));
|
2015-12-30 11:46:15 +00:00
|
|
|
return !isPotentiallyReachableFromMany(Worklist, BB, DT);
|
2015-07-05 14:21:36 +00:00
|
|
|
}
|
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
// If the value is defined in the same basic block as use and BeforeHere,
|
|
|
|
// there is no need to explore the use if BeforeHere dominates use.
|
|
|
|
// Check whether there is a path from I to BeforeHere.
|
|
|
|
if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
|
|
|
|
!isPotentiallyReachable(I, BeforeHere, DT))
|
2015-07-05 14:21:36 +00:00
|
|
|
return true;
|
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool shouldExplore(const Use *U) override {
|
|
|
|
Instruction *I = cast<Instruction>(U->getUser());
|
|
|
|
|
|
|
|
if (BeforeHere == I && !IncludeI)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
if (isSafeToPrune(I))
|
2014-11-24 09:08:18 +00:00
|
|
|
return false;
|
2015-07-05 14:21:36 +00:00
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool captured(const Use *U) override {
|
|
|
|
if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
|
|
|
|
return false;
|
|
|
|
|
2015-07-05 14:21:36 +00:00
|
|
|
if (!shouldExplore(U))
|
2014-11-24 09:08:18 +00:00
|
|
|
return false;
|
|
|
|
|
|
|
|
Captured = true;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2015-12-30 11:46:15 +00:00
|
|
|
OrderedBasicBlock *OrderedBB;
|
2014-11-24 09:08:18 +00:00
|
|
|
const Instruction *BeforeHere;
|
|
|
|
DominatorTree *DT;
|
|
|
|
|
|
|
|
bool ReturnCaptures;
|
|
|
|
bool IncludeI;
|
|
|
|
|
|
|
|
bool Captured;
|
|
|
|
};
|
2015-07-05 14:21:36 +00:00
|
|
|
}
|
2009-12-15 18:09:07 +00:00
|
|
|
|
2009-06-02 17:52:33 +00:00
|
|
|
/// PointerMayBeCaptured - Return true if this pointer value may be captured
|
|
|
|
/// by the enclosing function (which is required to exist). This routine can
|
|
|
|
/// be expensive, so consider caching the results. The boolean ReturnCaptures
|
|
|
|
/// specifies whether returning the value (or part of it) from the function
|
2009-12-01 11:07:05 +00:00
|
|
|
/// counts as capturing it or not. The boolean StoreCaptures specified whether
|
|
|
|
/// storing the value (or part of it) into memory anywhere automatically
|
2009-06-02 17:52:33 +00:00
|
|
|
/// counts as capturing it or not.
|
2009-12-01 11:07:05 +00:00
|
|
|
bool llvm::PointerMayBeCaptured(const Value *V,
|
|
|
|
bool ReturnCaptures, bool StoreCaptures) {
|
2012-04-14 13:54:10 +00:00
|
|
|
assert(!isa<GlobalValue>(V) &&
|
|
|
|
"It doesn't make sense to ask whether a global is captured.");
|
|
|
|
|
|
|
|
// TODO: If StoreCaptures is not true, we could do Fancy analysis
|
|
|
|
// to determine whether this store is not actually an escape point.
|
|
|
|
// In that case, BasicAliasAnalysis should be updated as well to
|
|
|
|
// take advantage of this.
|
|
|
|
(void)StoreCaptures;
|
|
|
|
|
|
|
|
SimpleCaptureTracker SCT(ReturnCaptures);
|
|
|
|
PointerMayBeCaptured(V, &SCT);
|
|
|
|
return SCT.Captured;
|
|
|
|
}
|
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
/// PointerMayBeCapturedBefore - Return true if this pointer value may be
|
|
|
|
/// captured by the enclosing function (which is required to exist). If a
|
|
|
|
/// DominatorTree is provided, only captures which happen before the given
|
|
|
|
/// instruction are considered. This routine can be expensive, so consider
|
|
|
|
/// caching the results. The boolean ReturnCaptures specifies whether
|
|
|
|
/// returning the value (or part of it) from the function counts as capturing
|
|
|
|
/// it or not. The boolean StoreCaptures specified whether storing the value
|
|
|
|
/// (or part of it) into memory anywhere automatically counts as capturing it
|
2015-12-30 11:46:15 +00:00
|
|
|
/// or not. A ordered basic block \p OBB can be used in order to speed up
|
|
|
|
/// queries about relative order among instructions in the same basic block.
|
2014-11-24 09:08:18 +00:00
|
|
|
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
|
|
|
|
bool StoreCaptures, const Instruction *I,
|
2015-12-30 11:46:15 +00:00
|
|
|
DominatorTree *DT, bool IncludeI,
|
|
|
|
OrderedBasicBlock *OBB) {
|
2014-11-24 09:08:18 +00:00
|
|
|
assert(!isa<GlobalValue>(V) &&
|
|
|
|
"It doesn't make sense to ask whether a global is captured.");
|
2015-12-30 11:46:15 +00:00
|
|
|
bool UseNewOBB = OBB == nullptr;
|
2014-11-24 09:08:18 +00:00
|
|
|
|
|
|
|
if (!DT)
|
|
|
|
return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures);
|
2015-12-30 11:46:15 +00:00
|
|
|
if (UseNewOBB)
|
|
|
|
OBB = new OrderedBasicBlock(I->getParent());
|
2014-11-24 09:08:18 +00:00
|
|
|
|
|
|
|
// TODO: See comment in PointerMayBeCaptured regarding what could be done
|
|
|
|
// with StoreCaptures.
|
|
|
|
|
2015-12-30 11:46:15 +00:00
|
|
|
CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, OBB);
|
2014-11-24 09:08:18 +00:00
|
|
|
PointerMayBeCaptured(V, &CB);
|
2015-12-30 11:46:15 +00:00
|
|
|
|
|
|
|
if (UseNewOBB)
|
|
|
|
delete OBB;
|
2014-11-24 09:08:18 +00:00
|
|
|
return CB.Captured;
|
|
|
|
}
|
|
|
|
|
2012-04-14 13:54:10 +00:00
|
|
|
/// TODO: Write a new FunctionPass AliasAnalysis so that it can keep
/// a cache. Then we can move the code from BasicAliasAnalysis into
/// that path, and remove this threshold.
static const int Threshold = 20;
|
|
|
|
|
|
|
|
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
|
2010-03-03 17:27:15 +00:00
|
|
|
assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
|
2014-11-24 09:08:18 +00:00
|
|
|
SmallVector<const Use *, Threshold> Worklist;
|
|
|
|
SmallSet<const Use *, Threshold> Visited;
|
2009-12-15 18:09:07 +00:00
|
|
|
int Count = 0;
|
2009-06-02 17:52:33 +00:00
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
for (const Use &U : V->uses()) {
|
2009-12-15 18:09:07 +00:00
|
|
|
// If there are lots of uses, conservatively say that the value
|
|
|
|
// is captured to avoid taking too much compile time.
|
|
|
|
if (Count++ >= Threshold)
|
2012-04-14 13:54:10 +00:00
|
|
|
return Tracker->tooManyUses();
|
2009-12-15 18:09:07 +00:00
|
|
|
|
2014-11-24 09:08:18 +00:00
|
|
|
if (!Tracker->shouldExplore(&U)) continue;
|
|
|
|
Visited.insert(&U);
|
|
|
|
Worklist.push_back(&U);
|
2009-06-02 17:52:33 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
while (!Worklist.empty()) {
|
2014-11-24 09:08:18 +00:00
|
|
|
const Use *U = Worklist.pop_back_val();
|
2009-06-02 17:52:33 +00:00
|
|
|
Instruction *I = cast<Instruction>(U->getUser());
|
|
|
|
V = U->get();
|
|
|
|
|
|
|
|
switch (I->getOpcode()) {
|
|
|
|
case Instruction::Call:
|
|
|
|
case Instruction::Invoke: {
|
2010-09-17 15:48:55 +00:00
|
|
|
CallSite CS(I);
|
2009-06-02 17:52:33 +00:00
|
|
|
// Not captured if the callee is readonly, doesn't return a copy through
|
|
|
|
// its return value and doesn't unwind (a readonly function can leak bits
|
|
|
|
// by throwing an exception or not depending on the input value).
|
2009-12-01 11:07:05 +00:00
|
|
|
if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
|
2009-06-02 17:52:33 +00:00
|
|
|
break;
|
|
|
|
|
2016-07-23 20:41:05 +00:00
|
|
|
// Volatile operations effectively capture the memory location that they
|
|
|
|
// load and store to.
|
|
|
|
if (auto *MI = dyn_cast<MemIntrinsic>(I))
|
|
|
|
if (MI->isVolatile())
|
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
|
|
|
|
2009-06-02 17:52:33 +00:00
|
|
|
// Not captured if only passed via 'nocapture' arguments. Note that
|
|
|
|
// calling a function pointer does not in itself cause the pointer to
|
|
|
|
// be captured. This is a subtle point considering that (for example)
|
|
|
|
// the callee might return its own address. It is analogous to saying
|
|
|
|
// that loading a value from a pointer does not cause the pointer to be
|
|
|
|
// captured, even though the loaded value might be the pointer itself
|
|
|
|
// (think of self-referential objects).
|
2015-12-30 11:46:15 +00:00
|
|
|
CallSite::data_operand_iterator B =
|
|
|
|
CS.data_operands_begin(), E = CS.data_operands_end();
|
|
|
|
for (CallSite::data_operand_iterator A = B; A != E; ++A)
|
2012-04-14 13:54:10 +00:00
|
|
|
if (A->get() == V && !CS.doesNotCapture(A - B))
|
2009-06-02 17:52:33 +00:00
|
|
|
// The parameter is not marked 'nocapture' - captured.
|
2012-04-14 13:54:10 +00:00
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
2009-06-02 17:52:33 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case Instruction::Load:
|
2016-07-23 20:41:05 +00:00
|
|
|
// Volatile loads make the address observable.
|
|
|
|
if (cast<LoadInst>(I)->isVolatile())
|
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
2009-06-02 17:52:33 +00:00
|
|
|
break;
|
2011-02-20 12:57:14 +00:00
|
|
|
case Instruction::VAArg:
|
|
|
|
// "va-arg" from a pointer does not cause it to be captured.
|
|
|
|
break;
|
2009-06-02 17:52:33 +00:00
|
|
|
case Instruction::Store:
|
2009-12-01 11:07:05 +00:00
|
|
|
// Stored the pointer - conservatively assume it may be captured.
|
2016-07-23 20:41:05 +00:00
|
|
|
// Volatile stores make the address observable.
|
|
|
|
if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
|
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
|
|
|
break;
|
|
|
|
case Instruction::AtomicRMW: {
|
|
|
|
// atomicrmw conceptually includes both a load and store from
|
|
|
|
// the same location.
|
|
|
|
// As with a store, the location being accessed is not captured,
|
|
|
|
// but the value being stored is.
|
|
|
|
// Volatile stores make the address observable.
|
|
|
|
auto *ARMWI = cast<AtomicRMWInst>(I);
|
|
|
|
if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
|
2012-04-14 13:54:10 +00:00
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
2009-06-02 17:52:33 +00:00
|
|
|
break;
|
2016-07-23 20:41:05 +00:00
|
|
|
}
|
|
|
|
case Instruction::AtomicCmpXchg: {
|
|
|
|
// cmpxchg conceptually includes both a load and store from
|
|
|
|
// the same location.
|
|
|
|
// As with a store, the location being accessed is not captured,
|
|
|
|
// but the value being stored is.
|
|
|
|
// Volatile stores make the address observable.
|
|
|
|
auto *ACXI = cast<AtomicCmpXchgInst>(I);
|
|
|
|
if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
|
|
|
|
ACXI->isVolatile())
|
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
|
|
|
break;
|
|
|
|
}
|
2009-06-02 17:52:33 +00:00
|
|
|
case Instruction::BitCast:
|
|
|
|
case Instruction::GetElementPtr:
|
|
|
|
case Instruction::PHI:
|
|
|
|
case Instruction::Select:
|
2014-11-24 09:08:18 +00:00
|
|
|
case Instruction::AddrSpaceCast:
|
2009-06-02 17:52:33 +00:00
|
|
|
// The original value is not captured via this if the new value isn't.
|
2013-12-22 00:04:03 +00:00
|
|
|
Count = 0;
|
2014-11-24 09:08:18 +00:00
|
|
|
for (Use &UU : I->uses()) {
|
2013-12-22 00:04:03 +00:00
|
|
|
// If there are lots of uses, conservatively say that the value
|
|
|
|
// is captured to avoid taking too much compile time.
|
|
|
|
if (Count++ >= Threshold)
|
|
|
|
return Tracker->tooManyUses();
|
|
|
|
|
2015-01-18 16:17:27 +00:00
|
|
|
if (Visited.insert(&UU).second)
|
2014-11-24 09:08:18 +00:00
|
|
|
if (Tracker->shouldExplore(&UU))
|
|
|
|
Worklist.push_back(&UU);
|
2009-06-02 17:52:33 +00:00
|
|
|
}
|
|
|
|
break;
|
2016-07-23 20:41:05 +00:00
|
|
|
case Instruction::ICmp: {
|
2009-12-01 11:07:05 +00:00
|
|
|
// Don't count comparisons of a no-alias return value against null as
|
|
|
|
// captures. This allows us to ignore comparisons of malloc results
|
|
|
|
// with null, for example.
|
2013-12-22 00:04:03 +00:00
|
|
|
if (ConstantPointerNull *CPN =
|
|
|
|
dyn_cast<ConstantPointerNull>(I->getOperand(1)))
|
|
|
|
if (CPN->getType()->getAddressSpace() == 0)
|
|
|
|
if (isNoAliasCall(V->stripPointerCasts()))
|
2009-12-01 11:07:05 +00:00
|
|
|
break;
|
2016-07-23 20:41:05 +00:00
|
|
|
// Comparison against value stored in global variable. Given the pointer
|
|
|
|
// does not escape, its value cannot be guessed and stored separately in a
|
|
|
|
// global variable.
|
|
|
|
unsigned OtherIndex = (I->getOperand(0) == V) ? 1 : 0;
|
|
|
|
auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIndex));
|
|
|
|
if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
|
|
|
|
break;
|
2009-12-01 11:07:05 +00:00
|
|
|
// Otherwise, be conservative. There are crazy ways to capture pointers
|
|
|
|
// using comparisons.
|
2012-04-14 13:54:10 +00:00
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
|
|
|
break;
|
2016-07-23 20:41:05 +00:00
|
|
|
}
|
2009-06-02 17:52:33 +00:00
|
|
|
default:
|
|
|
|
// Something else - be conservative and say it is captured.
|
2012-04-14 13:54:10 +00:00
|
|
|
if (Tracker->captured(U))
|
|
|
|
return;
|
|
|
|
break;
|
2009-06-02 17:52:33 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-04-14 13:54:10 +00:00
|
|
|
// All uses examined.
|
2009-06-02 17:52:33 +00:00
|
|
|
}
|