//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a
    // fallback indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *CI});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}
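
// Illustrative sketch (not code from this file): the function-pointer uses
// walked above typically come from IR such as
//
//   %fptr = load ptr, ptr %vtable.slot
//   call void %fptr(ptr %obj)            ; recorded in DevirtCalls
//   invoke void %fptr(ptr %obj) ...      ; also recorded
//
// where %fptr corresponds to FPtr. Any other use (e.g. a store of %fptr)
// only sets *HasNonCallUses when the caller asked for it; the value names
// here are placeholders.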

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, /*HasNonCallUses=*/nullptr, User,
                                Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(drop_begin(GEP->operands()));
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}
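
// Illustrative sketch: starting from the vtable pointer (VPtr), the walk
// above looks through bitcasts and constant-offset GEPs and hands each
// loaded slot to findCallsAtConstantOffset. A typical, hypothetical pattern:
//
//   %vtable = load ptr, ptr %obj
//   %slot   = getelementptr inbounds ptr, ptr %vtable, i64 2
//   %fptr   = load ptr, ptr %slot
//   call void %fptr(ptr %obj)   ; recorded with Offset = 16 on 64-bit targets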

void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::public_type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses())
    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assumes.push_back(Assume);

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}
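
// Illustrative sketch: the pattern recognized above is the one frontends
// typically emit for a guarded virtual call, e.g.
//
//   %vtable = load ptr, ptr %obj
//   %p = call i1 @llvm.type.test(ptr %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   %fptr = load ptr, ptr %vtable
//   call void %fptr(ptr %obj)
//
// The llvm.assume call lands in Assumes and the indirect call is reported as
// a DevirtCallSite at offset 0. The type name "_ZTS1A" is only an example.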

void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load_relative);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}
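
// Illustrative sketch: llvm.type.checked.load folds the vtable slot load and
// the type test into a single intrinsic returning {ptr, i1}, e.g.
//
//   %pair = call {ptr, i1} @llvm.type.checked.load(ptr %vtable, i32 8,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {ptr, i1} %pair, 0   ; collected in LoadedPtrs
//   %ok   = extractvalue {ptr, i1} %pair, 1   ; collected in Preds
//   br i1 %ok, label %cont, label %trap
//   ...
//   call void %fptr(ptr %obj)                 ; DevirtCallSite at offset 8
//
// The vtable name, type name, and offset here are purely for illustration.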

Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M,
                                   Constant *TopLevelGlobal) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M,
                              TopLevelGlobal);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M, TopLevelGlobal);
  }

  // (Swift-specific) relative-pointer support starts here.
  if (auto *CI = dyn_cast<ConstantInt>(I)) {
    if (Offset == 0 && CI->isZero()) {
      return I;
    }
  }
  if (auto *C = dyn_cast<ConstantExpr>(I)) {
    switch (C->getOpcode()) {
    case Instruction::Trunc:
    case Instruction::PtrToInt:
      return getPointerAtOffset(cast<Constant>(C->getOperand(0)), Offset, M,
                                TopLevelGlobal);
    case Instruction::Sub: {
      auto *Operand0 = cast<Constant>(C->getOperand(0));
      auto *Operand1 = cast<Constant>(C->getOperand(1));

      auto StripGEP = [](Constant *C) {
        auto *CE = dyn_cast<ConstantExpr>(C);
        if (!CE)
          return C;
        if (CE->getOpcode() != Instruction::GetElementPtr)
          return C;
        return CE->getOperand(0);
      };
      auto *Operand1TargetGlobal = StripGEP(getPointerAtOffset(Operand1, 0, M));

      // Check that in the "sub (@a, @b)" expression, @b points back to the top
      // level global (or a GEP thereof) that we're processing. Otherwise bail.
      if (Operand1TargetGlobal != TopLevelGlobal)
        return nullptr;

      return getPointerAtOffset(Operand0, Offset, M, TopLevelGlobal);
    }
    default:
      return nullptr;
    }
  }
  return nullptr;
}
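
// Illustrative sketch: getPointerAtOffset descends through a constant
// initializer such as the hypothetical vtable below; asking for offset 16 in
// a module with 64-bit pointers would return @f2. The Trunc/PtrToInt/Sub
// cases above let the same walk look through Swift-style relative pointers,
// i.e. i32 fields of the form trunc(sub(ptrtoint @fn, ptrtoint @vt)).
//
//   @vt = constant { [3 x ptr] } { [3 x ptr] [ptr null, ptr @rtti, ptr @f2] }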

std::pair<Function *, Constant *>
llvm::getFunctionAtVTableOffset(GlobalVariable *GV, uint64_t Offset,
                                Module &M) {
  Constant *Ptr = getPointerAtOffset(GV->getInitializer(), Offset, M, GV);
  if (!Ptr)
    return std::pair<Function *, Constant *>(nullptr, nullptr);

  auto C = Ptr->stripPointerCasts();
  // Make sure this is a function or alias to a function.
  auto Fn = dyn_cast<Function>(C);
  auto A = dyn_cast<GlobalAlias>(C);
  if (!Fn && A)
    Fn = dyn_cast<Function>(A->getAliasee());

  if (!Fn)
    return std::pair<Function *, Constant *>(nullptr, nullptr);

  return std::pair<Function *, Constant *>(Fn, C);
}
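
// Illustrative usage (a sketch, not code from this file): a devirtualization
// pass holding a vtable global and a byte offset into it might do
//
//   auto [Fn, Target] = getFunctionAtVTableOffset(VTableGV, SlotOffset, M);
//   if (Fn) {
//     // Fn is the virtual function implementation at that slot; Target is
//     // the (possibly aliased) constant found in the initializer.
//   }
//
// VTableGV and SlotOffset are placeholder names for the caller's values.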

void llvm::replaceRelativePointerUsersWithZero(Function *F) {
  for (auto *U : F->users()) {
    auto *PtrExpr = dyn_cast<ConstantExpr>(U);
    if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
      continue;

    for (auto *PtrToIntUser : PtrExpr->users()) {
      auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
      if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
        continue;

      SubExpr->replaceNonMetadataUsesWith(
          ConstantInt::get(SubExpr->getType(), 0));
    }
  }
}
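
// Illustrative sketch: a relative pointer to @F inside a vtable initializer
// is typically a constant expression of the form
//
//   trunc (i64 sub (i64 ptrtoint (ptr @F to i64),
//                   i64 ptrtoint (ptr @vt to i64)) to i32)
//
// The loop above finds the ptrtoint(@F) users feeding such sub expressions
// and rewrites the sub's non-metadata uses to 0, clearing those slots when @F
// itself is being dropped. @F and @vt are placeholder names.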