1//===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This contains code dealing with C++ code generation of virtual tables.
10//
11//===----------------------------------------------------------------------===//
12
13#include "CGCXXABI.h"
14#include "CodeGenFunction.h"
15#include "CodeGenModule.h"
16#include "clang/AST/Attr.h"
17#include "clang/AST/CXXInheritance.h"
18#include "clang/AST/RecordLayout.h"
19#include "clang/Basic/CodeGenOptions.h"
20#include "clang/CodeGen/CGFunctionInfo.h"
21#include "clang/CodeGen/ConstantInitBuilder.h"
22#include "llvm/IR/IntrinsicInst.h"
23#include "llvm/Support/Format.h"
24#include "llvm/Transforms/Utils/Cloning.h"
25#include <algorithm>
26#include <cstdio>
27#include <utility>
28
29using namespace clang;
30using namespace CodeGen;
31
32CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
33 : CGM(CGM), VTContext(CGM.getContext().getVTableContext()) {}
34
35llvm::Constant *CodeGenModule::GetAddrOfThunk(StringRef Name, llvm::Type *FnTy,
36 GlobalDecl GD) {
37 return GetOrCreateLLVMFunction(MangledName: Name, Ty: FnTy, D: GD, /*ForVTable=*/true,
38 /*DontDefer=*/true, /*IsThunk=*/true);
39}
40
41static void setThunkProperties(CodeGenModule &CGM, const ThunkInfo &Thunk,
42 llvm::Function *ThunkFn, bool ForVTable,
43 GlobalDecl GD) {
44 CGM.setFunctionLinkage(GD, F: ThunkFn);
45 CGM.getCXXABI().setThunkLinkage(Thunk: ThunkFn, ForVTable, GD,
46 ReturnAdjustment: !Thunk.Return.isEmpty());
47
48 // Set the right visibility.
49 CGM.setGVProperties(GV: ThunkFn, GD);
50
51 if (!CGM.getCXXABI().exportThunk()) {
52 ThunkFn->setDLLStorageClass(llvm::GlobalValue::DefaultStorageClass);
53 ThunkFn->setDSOLocal(true);
54 }
55
56 if (CGM.supportsCOMDAT() && ThunkFn->isWeakForLinker())
57 ThunkFn->setComdat(CGM.getModule().getOrInsertComdat(Name: ThunkFn->getName()));
58}
59
60#ifndef NDEBUG
61static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
62 const ABIArgInfo &infoR, CanQualType typeR) {
63 return (infoL.getKind() == infoR.getKind() &&
64 (typeL == typeR ||
65 (isa<PointerType>(Val: typeL) && isa<PointerType>(Val: typeR)) ||
66 (isa<ReferenceType>(Val: typeL) && isa<ReferenceType>(Val: typeR))));
67}
68#endif
69
70static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
71 QualType ResultType, RValue RV,
72 const ThunkInfo &Thunk) {
73 // Emit the return adjustment.
74 bool NullCheckValue = !ResultType->isReferenceType();
75
76 llvm::BasicBlock *AdjustNull = nullptr;
77 llvm::BasicBlock *AdjustNotNull = nullptr;
78 llvm::BasicBlock *AdjustEnd = nullptr;
79
80 llvm::Value *ReturnValue = RV.getScalarVal();
81
82 if (NullCheckValue) {
83 AdjustNull = CGF.createBasicBlock(name: "adjust.null");
84 AdjustNotNull = CGF.createBasicBlock(name: "adjust.notnull");
85 AdjustEnd = CGF.createBasicBlock(name: "adjust.end");
86
87 llvm::Value *IsNull = CGF.Builder.CreateIsNull(Arg: ReturnValue);
88 CGF.Builder.CreateCondBr(Cond: IsNull, True: AdjustNull, False: AdjustNotNull);
89 CGF.EmitBlock(BB: AdjustNotNull);
90 }
91
92 auto ClassDecl = ResultType->getPointeeType()->getAsCXXRecordDecl();
93 auto ClassAlign = CGF.CGM.getClassPointerAlignment(CD: ClassDecl);
94 ReturnValue = CGF.CGM.getCXXABI().performReturnAdjustment(
95 CGF,
96 Ret: Address(ReturnValue, CGF.ConvertTypeForMem(T: ResultType->getPointeeType()),
97 ClassAlign),
98 RA: Thunk.Return);
99
100 if (NullCheckValue) {
101 CGF.Builder.CreateBr(Dest: AdjustEnd);
102 CGF.EmitBlock(BB: AdjustNull);
103 CGF.Builder.CreateBr(Dest: AdjustEnd);
104 CGF.EmitBlock(BB: AdjustEnd);
105
106 llvm::PHINode *PHI = CGF.Builder.CreatePHI(Ty: ReturnValue->getType(), NumReservedValues: 2);
107 PHI->addIncoming(V: ReturnValue, BB: AdjustNotNull);
108 PHI->addIncoming(V: llvm::Constant::getNullValue(Ty: ReturnValue->getType()),
109 BB: AdjustNull);
110 ReturnValue = PHI;
111 }
112
113 return RValue::get(V: ReturnValue);
114}
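
// Illustrative sketch (hypothetical classes, not referenced by this file): a
// return adjustment is needed when an override has a covariant return type
// and the overridden base lives at a non-zero offset inside the returned
// class, e.g.
//
//   struct A          { virtual A *clone(); };
//   struct Pad        { virtual ~Pad(); };
//   struct B : Pad, A { B *clone() override; };
//
// The A-in-B vtable entry for clone() must return an A*, so its thunk calls
// B::clone and shifts the returned B* by the offset of the A subobject. For
// pointer results the shift is skipped when the result is null, which is the
// null check emitted above; reference results never take that path.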
115
116/// This function clones a function's DISubprogram node and enters it into
117/// a value map with the intent that the map can be utilized by the cloner
118/// to short-circuit Metadata node mapping.
119/// Furthermore, the function resolves any DILocalVariable nodes referenced
120/// by dbg.value intrinsics so they can be properly mapped during cloning.
121static void resolveTopLevelMetadata(llvm::Function *Fn,
122 llvm::ValueToValueMapTy &VMap) {
123 // Clone the DISubprogram node and put it into the Value map.
124 auto *DIS = Fn->getSubprogram();
125 if (!DIS)
126 return;
127 auto *NewDIS = DIS->replaceWithDistinct(N: DIS->clone());
128 VMap.MD()[DIS].reset(MD: NewDIS);
129
130 // Find all llvm.dbg.declare intrinsics and resolve the DILocalVariable nodes
131 // they are referencing.
132 for (auto &BB : *Fn) {
133 for (auto &I : BB) {
134 if (auto *DII = dyn_cast<llvm::DbgVariableIntrinsic>(Val: &I)) {
135 auto *DILocal = DII->getVariable();
136 if (!DILocal->isResolved())
137 DILocal->resolve();
138 }
139 }
140 }
141}
142
143// This function does roughly the same thing as GenerateThunk, but in a
144// very different way, so that va_start and va_end work correctly.
145// FIXME: This function assumes "this" is the first non-sret LLVM argument of
146// a function, and that there is an alloca built in the entry block
147// for all accesses to "this".
148// FIXME: This function assumes there is only one "ret" statement per function.
149// FIXME: Cloning isn't correct in the presence of indirect goto!
150// FIXME: This implementation of thunks bloats codesize by duplicating the
151// function definition. There are alternatives:
152// 1. Add some sort of stub support to LLVM for cases where we can
153// do a this adjustment, then a sibcall.
154// 2. We could transform the definition to take a va_list instead of an
155// actual variable argument list, then have the thunks (including a
156// no-op thunk for the regular definition) call va_start/va_end.
157// There's a bit of per-call overhead for this solution, but it's
158// better for codesize if the definition is long.
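
// For illustration only (hypothetical classes): cloning is needed when a
// variadic virtual function requires an adjusting thunk, e.g.
//
//   struct A          { virtual void log(const char *Fmt, ...); };
//   struct Pad        { virtual ~Pad(); };
//   struct B : Pad, A { void log(const char *Fmt, ...) override; };
//
// The A-in-B vtable entry for log() needs a this-adjusting thunk, and since
// the variable arguments cannot in general be re-forwarded from a separate
// function, the thunk is produced by cloning B::log's body and patching the
// stored "this" (and, if there is a return adjustment, the returns) in the
// clone.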
159llvm::Function *
160CodeGenFunction::GenerateVarArgsThunk(llvm::Function *Fn,
161 const CGFunctionInfo &FnInfo,
162 GlobalDecl GD, const ThunkInfo &Thunk) {
163 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
164 const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
165 QualType ResultType = FPT->getReturnType();
166
167 // Get the original function
168 assert(FnInfo.isVariadic());
169 llvm::Type *Ty = CGM.getTypes().GetFunctionType(Info: FnInfo);
170 llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
171 llvm::Function *BaseFn = cast<llvm::Function>(Val: Callee);
172
173 // Cloning can't work if we don't have a definition. The Microsoft ABI may
174 // require thunks when a definition is not available. Emit an error in these
175 // cases.
176 if (!MD->isDefined()) {
177 CGM.ErrorUnsupported(MD, "return-adjusting thunk with variadic arguments");
178 return Fn;
179 }
180 assert(!BaseFn->isDeclaration() && "cannot clone undefined variadic method");
181
182 // Clone to thunk.
183 llvm::ValueToValueMapTy VMap;
184
185 // We are cloning a function while some Metadata nodes are still unresolved.
186 // Ensure that the value mapper does not encounter any of them.
187 resolveTopLevelMetadata(Fn: BaseFn, VMap);
188 llvm::Function *NewFn = llvm::CloneFunction(F: BaseFn, VMap);
189 Fn->replaceAllUsesWith(V: NewFn);
190 NewFn->takeName(V: Fn);
191 Fn->eraseFromParent();
192 Fn = NewFn;
193
194 // "Initialize" CGF (minimally).
195 CurFn = Fn;
196
197 // Get the "this" value
198 llvm::Function::arg_iterator AI = Fn->arg_begin();
199 if (CGM.ReturnTypeUsesSRet(FI: FnInfo))
200 ++AI;
201
202 // Find the first store of "this", which will be to the alloca associated
203 // with "this".
204 Address ThisPtr =
205 Address(&*AI, ConvertTypeForMem(T: MD->getFunctionObjectParameterType()),
206 CGM.getClassPointerAlignment(CD: MD->getParent()));
207 llvm::BasicBlock *EntryBB = &Fn->front();
208 llvm::BasicBlock::iterator ThisStore =
209 llvm::find_if(Range&: *EntryBB, P: [&](llvm::Instruction &I) {
210 return isa<llvm::StoreInst>(Val: I) &&
211 I.getOperand(i: 0) == ThisPtr.getPointer();
212 });
213 assert(ThisStore != EntryBB->end() &&
214 "Store of this should be in entry block?");
215 // Adjust "this", if necessary.
216 Builder.SetInsertPoint(&*ThisStore);
217 llvm::Value *AdjustedThisPtr =
218 CGM.getCXXABI().performThisAdjustment(CGF&: *this, This: ThisPtr, TA: Thunk.This);
219 AdjustedThisPtr = Builder.CreateBitCast(V: AdjustedThisPtr,
220 DestTy: ThisStore->getOperand(i: 0)->getType());
221 ThisStore->setOperand(i: 0, Val: AdjustedThisPtr);
222
223 if (!Thunk.Return.isEmpty()) {
224 // Fix up the returned value, if necessary.
225 for (llvm::BasicBlock &BB : *Fn) {
226 llvm::Instruction *T = BB.getTerminator();
227 if (isa<llvm::ReturnInst>(Val: T)) {
228 RValue RV = RValue::get(V: T->getOperand(i: 0));
229 T->eraseFromParent();
230 Builder.SetInsertPoint(&BB);
231 RV = PerformReturnAdjustment(CGF&: *this, ResultType, RV, Thunk);
232 Builder.CreateRet(V: RV.getScalarVal());
233 break;
234 }
235 }
236 }
237
238 return Fn;
239}
240
241void CodeGenFunction::StartThunk(llvm::Function *Fn, GlobalDecl GD,
242 const CGFunctionInfo &FnInfo,
243 bool IsUnprototyped) {
244 assert(!CurGD.getDecl() && "CurGD was already set!");
245 CurGD = GD;
246 CurFuncIsThunk = true;
247
248 // Build FunctionArgs.
249 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
250 QualType ThisType = MD->getThisType();
251 QualType ResultType;
252 if (IsUnprototyped)
253 ResultType = CGM.getContext().VoidTy;
254 else if (CGM.getCXXABI().HasThisReturn(GD))
255 ResultType = ThisType;
256 else if (CGM.getCXXABI().hasMostDerivedReturn(GD))
257 ResultType = CGM.getContext().VoidPtrTy;
258 else
259 ResultType = MD->getType()->castAs<FunctionProtoType>()->getReturnType();
260 FunctionArgList FunctionArgs;
261
262 // Create the implicit 'this' parameter declaration.
263 CGM.getCXXABI().buildThisParam(CGF&: *this, Params&: FunctionArgs);
264
265 // Add the rest of the parameters, if we have a prototype to work with.
266 if (!IsUnprototyped) {
267 FunctionArgs.append(MD->param_begin(), MD->param_end());
268
269 if (isa<CXXDestructorDecl>(Val: MD))
270 CGM.getCXXABI().addImplicitStructorParams(CGF&: *this, ResTy&: ResultType,
271 Params&: FunctionArgs);
272 }
273
274 // Start defining the function.
275 auto NL = ApplyDebugLocation::CreateEmpty(CGF&: *this);
276 StartFunction(GD: GlobalDecl(), RetTy: ResultType, Fn, FnInfo, Args: FunctionArgs,
277 Loc: MD->getLocation());
278 // Create a scope with an artificial location for the body of this function.
279 auto AL = ApplyDebugLocation::CreateArtificial(CGF&: *this);
280
281 // Since we didn't pass a GlobalDecl to StartFunction, do this ourselves.
282 CGM.getCXXABI().EmitInstanceFunctionProlog(CGF&: *this);
283 CXXThisValue = CXXABIThisValue;
284 CurCodeDecl = MD;
285 CurFuncDecl = MD;
286}
287
288void CodeGenFunction::FinishThunk() {
289 // Clear these to restore the invariants expected by
290 // StartFunction/FinishFunction.
291 CurCodeDecl = nullptr;
292 CurFuncDecl = nullptr;
293
294 FinishFunction();
295}
296
297void CodeGenFunction::EmitCallAndReturnForThunk(llvm::FunctionCallee Callee,
298 const ThunkInfo *Thunk,
299 bool IsUnprototyped) {
300 assert(isa<CXXMethodDecl>(CurGD.getDecl()) &&
301 "Please use a new CGF for this thunk");
302 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: CurGD.getDecl());
303
304 // Adjust the 'this' pointer if necessary
305 llvm::Value *AdjustedThisPtr =
306 Thunk ? CGM.getCXXABI().performThisAdjustment(
307 CGF&: *this, This: LoadCXXThisAddress(), TA: Thunk->This)
308 : LoadCXXThis();
309
310 // If perfect forwarding is required for a variadic method, a method using
311 // inalloca, or an unprototyped thunk, use musttail. Emit an error if this
312 // thunk requires a return adjustment, since that is impossible with musttail.
313 if (CurFnInfo->usesInAlloca() || CurFnInfo->isVariadic() || IsUnprototyped) {
314 if (Thunk && !Thunk->Return.isEmpty()) {
315 if (IsUnprototyped)
316 CGM.ErrorUnsupported(
317 MD, "return-adjusting thunk with incomplete parameter type");
318 else if (CurFnInfo->isVariadic())
319 llvm_unreachable("shouldn't try to emit musttail return-adjusting "
320 "thunks for variadic functions");
321 else
322 CGM.ErrorUnsupported(
323 MD, "non-trivial argument copy for return-adjusting thunk");
324 }
325 EmitMustTailThunk(GD: CurGD, AdjustedThisPtr, Callee);
326 return;
327 }
328
329 // Start building CallArgs.
330 CallArgList CallArgs;
331 QualType ThisType = MD->getThisType();
332 CallArgs.add(rvalue: RValue::get(V: AdjustedThisPtr), type: ThisType);
333
334 if (isa<CXXDestructorDecl>(Val: MD))
335 CGM.getCXXABI().adjustCallArgsForDestructorThunk(CGF&: *this, GD: CurGD, CallArgs);
336
337#ifndef NDEBUG
338 unsigned PrefixArgs = CallArgs.size() - 1;
339#endif
340 // Add the rest of the arguments.
341 for (const ParmVarDecl *PD : MD->parameters())
342 EmitDelegateCallArg(CallArgs, PD, SourceLocation());
343
344 const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
345
346#ifndef NDEBUG
347 const CGFunctionInfo &CallFnInfo = CGM.getTypes().arrangeCXXMethodCall(
348 args: CallArgs, type: FPT, required: RequiredArgs::forPrototypePlus(prototype: FPT, additional: 1), numPrefixArgs: PrefixArgs);
349 assert(CallFnInfo.getRegParm() == CurFnInfo->getRegParm() &&
350 CallFnInfo.isNoReturn() == CurFnInfo->isNoReturn() &&
351 CallFnInfo.getCallingConvention() == CurFnInfo->getCallingConvention());
352 assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
353 similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
354 CurFnInfo->getReturnInfo(), CurFnInfo->getReturnType()));
355 assert(CallFnInfo.arg_size() == CurFnInfo->arg_size());
356 for (unsigned i = 0, e = CurFnInfo->arg_size(); i != e; ++i)
357 assert(similar(CallFnInfo.arg_begin()[i].info,
358 CallFnInfo.arg_begin()[i].type,
359 CurFnInfo->arg_begin()[i].info,
360 CurFnInfo->arg_begin()[i].type));
361#endif
362
363 // Determine whether we have a return value slot to use.
364 QualType ResultType = CGM.getCXXABI().HasThisReturn(GD: CurGD)
365 ? ThisType
366 : CGM.getCXXABI().hasMostDerivedReturn(GD: CurGD)
367 ? CGM.getContext().VoidPtrTy
368 : FPT->getReturnType();
369 ReturnValueSlot Slot;
370 if (!ResultType->isVoidType() &&
371 (CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect ||
372 hasAggregateEvaluationKind(T: ResultType)))
373 Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified(),
374 /*IsUnused=*/false, /*IsExternallyDestructed=*/true);
375
376 // Now emit our call.
377 llvm::CallBase *CallOrInvoke;
378 RValue RV = EmitCall(CallInfo: *CurFnInfo, Callee: CGCallee::forDirect(functionPtr: Callee, abstractInfo: CurGD), ReturnValue: Slot,
379 Args: CallArgs, callOrInvoke: &CallOrInvoke);
380
381 // Consider return adjustment if we have ThunkInfo.
382 if (Thunk && !Thunk->Return.isEmpty())
383 RV = PerformReturnAdjustment(CGF&: *this, ResultType, RV, Thunk: *Thunk);
384 else if (llvm::CallInst* Call = dyn_cast<llvm::CallInst>(Val: CallOrInvoke))
385 Call->setTailCallKind(llvm::CallInst::TCK_Tail);
386
387 // Emit return.
388 if (!ResultType->isVoidType() && Slot.isNull())
389 CGM.getCXXABI().EmitReturnFromThunk(CGF&: *this, RV, ResultType);
390
391 // Disable the final ARC autorelease.
392 AutoreleaseResult = false;
393
394 FinishThunk();
395}
396
397void CodeGenFunction::EmitMustTailThunk(GlobalDecl GD,
398 llvm::Value *AdjustedThisPtr,
399 llvm::FunctionCallee Callee) {
400 // Emitting a musttail call thunk doesn't use any of the CGCall.cpp machinery
401 // to translate AST arguments into LLVM IR arguments. For thunks, we know
402 // that the caller prototype more or less matches the callee prototype with
403 // the exception of 'this'.
404 SmallVector<llvm::Value *, 8> Args(llvm::make_pointer_range(Range: CurFn->args()));
405
406 // Set the adjusted 'this' pointer.
407 const ABIArgInfo &ThisAI = CurFnInfo->arg_begin()->info;
408 if (ThisAI.isDirect()) {
409 const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
410 int ThisArgNo = RetAI.isIndirect() && !RetAI.isSRetAfterThis() ? 1 : 0;
411 llvm::Type *ThisType = Args[ThisArgNo]->getType();
412 if (ThisType != AdjustedThisPtr->getType())
413 AdjustedThisPtr = Builder.CreateBitCast(V: AdjustedThisPtr, DestTy: ThisType);
414 Args[ThisArgNo] = AdjustedThisPtr;
415 } else {
416 assert(ThisAI.isInAlloca() && "this is passed directly or inalloca");
417 Address ThisAddr = GetAddrOfLocalVar(CXXABIThisDecl);
418 llvm::Type *ThisType = ThisAddr.getElementType();
419 if (ThisType != AdjustedThisPtr->getType())
420 AdjustedThisPtr = Builder.CreateBitCast(V: AdjustedThisPtr, DestTy: ThisType);
421 Builder.CreateStore(Val: AdjustedThisPtr, Addr: ThisAddr);
422 }
423
424 // Emit the musttail call manually. Even if the prologue pushed cleanups, we
425 // don't actually want to run them.
426 llvm::CallInst *Call = Builder.CreateCall(Callee, Args);
427 Call->setTailCallKind(llvm::CallInst::TCK_MustTail);
428
429 // Apply the standard set of call attributes.
430 unsigned CallingConv;
431 llvm::AttributeList Attrs;
432 CGM.ConstructAttributeList(Name: Callee.getCallee()->getName(), Info: *CurFnInfo, CalleeInfo: GD,
433 Attrs, CallingConv, /*AttrOnCallSite=*/true,
434 /*IsThunk=*/false);
435 Call->setAttributes(Attrs);
436 Call->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));
437
438 if (Call->getType()->isVoidTy())
439 Builder.CreateRetVoid();
440 else
441 Builder.CreateRet(V: Call);
442
443 // Finish the function to maintain CodeGenFunction invariants.
444 // FIXME: Don't emit unreachable code.
445 EmitBlock(BB: createBasicBlock());
446
447 FinishThunk();
448}
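
// The thunk built here has, in effect, the following shape (a sketch of the
// resulting IR, not a literal dump):
//
//   define ... @thunk(ptr %this, <other args>) {
//     %this.adj = <apply Thunk->This to %this>
//     %r = musttail call ... @callee(ptr %this.adj, <other args>)
//     ret ... %r                    ; or "ret void"
//   }
//
// Because the call is musttail, it is guaranteed to be lowered as a tail
// call, so trailing variadic and inalloca arguments reach the callee
// untouched even though they are never reloaded here.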
449
450void CodeGenFunction::generateThunk(llvm::Function *Fn,
451 const CGFunctionInfo &FnInfo, GlobalDecl GD,
452 const ThunkInfo &Thunk,
453 bool IsUnprototyped) {
454 StartThunk(Fn, GD, FnInfo, IsUnprototyped);
455 // Create a scope with an artificial location for the body of this function.
456 auto AL = ApplyDebugLocation::CreateArtificial(CGF&: *this);
457
458 // Get our callee. Use a placeholder type if this method is unprototyped so
459 // that CodeGenModule doesn't try to set attributes.
460 llvm::Type *Ty;
461 if (IsUnprototyped)
462 Ty = llvm::StructType::get(Context&: getLLVMContext());
463 else
464 Ty = CGM.getTypes().GetFunctionType(Info: FnInfo);
465
466 llvm::Constant *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
467
468 // Make the call and return the result.
469 EmitCallAndReturnForThunk(Callee: llvm::FunctionCallee(Fn->getFunctionType(), Callee),
470 Thunk: &Thunk, IsUnprototyped);
471}
472
473static bool shouldEmitVTableThunk(CodeGenModule &CGM, const CXXMethodDecl *MD,
474 bool IsUnprototyped, bool ForVTable) {
475 // Always emit thunks in the MS C++ ABI. We cannot rely on other TUs to
476 // provide thunks for us.
477 if (CGM.getTarget().getCXXABI().isMicrosoft())
478 return true;
479
480 // In the Itanium C++ ABI, vtable thunks are provided by TUs that provide
481 // definitions of the main method. Therefore, emitting thunks with the vtable
482 // is purely an optimization. Emit the thunk if optimizations are enabled and
483 // all of the parameter types are complete.
484 if (ForVTable)
485 return CGM.getCodeGenOpts().OptimizationLevel && !IsUnprototyped;
486
487 // Always emit thunks along with the method definition.
488 return true;
489}
490
491llvm::Constant *CodeGenVTables::maybeEmitThunk(GlobalDecl GD,
492 const ThunkInfo &TI,
493 bool ForVTable) {
494 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
495
496 // First, get a declaration. Compute the mangled name. Don't worry about
497 // getting the function prototype right, since we may only need this
498 // declaration to fill in a vtable slot.
499 SmallString<256> Name;
500 MangleContext &MCtx = CGM.getCXXABI().getMangleContext();
501 llvm::raw_svector_ostream Out(Name);
502 if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(Val: MD))
503 MCtx.mangleCXXDtorThunk(DD, Type: GD.getDtorType(), ThisAdjustment: TI.This, Out);
504 else
505 MCtx.mangleThunk(MD, Thunk: TI, Out);
506 llvm::Type *ThunkVTableTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
507 llvm::Constant *Thunk = CGM.GetAddrOfThunk(Name, FnTy: ThunkVTableTy, GD);
508
509 // If we don't need to emit a definition, return this declaration as is.
510 bool IsUnprototyped = !CGM.getTypes().isFuncTypeConvertible(
511 FT: MD->getType()->castAs<FunctionType>());
512 if (!shouldEmitVTableThunk(CGM, MD, IsUnprototyped, ForVTable))
513 return Thunk;
514
515 // Arrange a function prototype appropriate for a function definition. In some
516 // cases in the MS ABI, we may need to build an unprototyped musttail thunk.
517 const CGFunctionInfo &FnInfo =
518 IsUnprototyped ? CGM.getTypes().arrangeUnprototypedMustTailThunk(MD)
519 : CGM.getTypes().arrangeGlobalDeclaration(GD);
520 llvm::FunctionType *ThunkFnTy = CGM.getTypes().GetFunctionType(Info: FnInfo);
521
522 // If the type of the underlying GlobalValue is wrong, we'll have to replace
523 // it. It should be a declaration.
524 llvm::Function *ThunkFn = cast<llvm::Function>(Val: Thunk->stripPointerCasts());
525 if (ThunkFn->getFunctionType() != ThunkFnTy) {
526 llvm::GlobalValue *OldThunkFn = ThunkFn;
527
528 assert(OldThunkFn->isDeclaration() && "Shouldn't replace non-declaration");
529
530 // Remove the name from the old thunk function and get a new thunk.
531 OldThunkFn->setName(StringRef());
532 ThunkFn = llvm::Function::Create(Ty: ThunkFnTy, Linkage: llvm::Function::ExternalLinkage,
533 N: Name.str(), M: &CGM.getModule());
534 CGM.SetLLVMFunctionAttributes(MD, FnInfo, ThunkFn, /*IsThunk=*/false);
535
536 if (!OldThunkFn->use_empty()) {
537 OldThunkFn->replaceAllUsesWith(V: ThunkFn);
538 }
539
540 // Remove the old thunk.
541 OldThunkFn->eraseFromParent();
542 }
543
544 bool ABIHasKeyFunctions = CGM.getTarget().getCXXABI().hasKeyFunctions();
545 bool UseAvailableExternallyLinkage = ForVTable && ABIHasKeyFunctions;
546
547 if (!ThunkFn->isDeclaration()) {
548 if (!ABIHasKeyFunctions || UseAvailableExternallyLinkage) {
549 // There is already a thunk emitted for this function, do nothing.
550 return ThunkFn;
551 }
552
553 setThunkProperties(CGM, Thunk: TI, ThunkFn, ForVTable, GD);
554 return ThunkFn;
555 }
556
557 // If this will be unprototyped, add the "thunk" attribute so that LLVM knows
558 // that the return type is meaningless. These thunks can be used to call
559 // functions with differing return types, and the caller is required to cast
560 // the prototype appropriately to extract the correct value.
561 if (IsUnprototyped)
562 ThunkFn->addFnAttr(Kind: "thunk");
563
564 CGM.SetLLVMFunctionAttributesForDefinition(D: GD.getDecl(), F: ThunkFn);
565
566 // Thunks for variadic methods are special because in general variadic
567 // arguments cannot be perfectly forwarded. In the general case, clang
568 // implements such thunks by cloning the original function body. However, for
569 // thunks with no return adjustment on targets that support musttail, we can
570 // use musttail to perfectly forward the variadic arguments.
571 bool ShouldCloneVarArgs = false;
572 if (!IsUnprototyped && ThunkFn->isVarArg()) {
573 ShouldCloneVarArgs = true;
574 if (TI.Return.isEmpty()) {
575 switch (CGM.getTriple().getArch()) {
576 case llvm::Triple::x86_64:
577 case llvm::Triple::x86:
578 case llvm::Triple::aarch64:
579 ShouldCloneVarArgs = false;
580 break;
581 default:
582 break;
583 }
584 }
585 }
586
587 if (ShouldCloneVarArgs) {
588 if (UseAvailableExternallyLinkage)
589 return ThunkFn;
590 ThunkFn =
591 CodeGenFunction(CGM).GenerateVarArgsThunk(Fn: ThunkFn, FnInfo, GD, Thunk: TI);
592 } else {
593 // Normal thunk body generation.
594 CodeGenFunction(CGM).generateThunk(Fn: ThunkFn, FnInfo, GD, Thunk: TI, IsUnprototyped);
595 }
596
597 setThunkProperties(CGM, Thunk: TI, ThunkFn, ForVTable, GD);
598 return ThunkFn;
599}
600
601void CodeGenVTables::EmitThunks(GlobalDecl GD) {
602 const CXXMethodDecl *MD =
603 cast<CXXMethodDecl>(Val: GD.getDecl())->getCanonicalDecl();
604
605 // We don't need to generate thunks for the base destructor.
606 if (isa<CXXDestructorDecl>(Val: MD) && GD.getDtorType() == Dtor_Base)
607 return;
608
609 const VTableContextBase::ThunkInfoVectorTy *ThunkInfoVector =
610 VTContext->getThunkInfo(GD);
611
612 if (!ThunkInfoVector)
613 return;
614
615 for (const ThunkInfo& Thunk : *ThunkInfoVector)
616 maybeEmitThunk(GD, TI: Thunk, /*ForVTable=*/false);
617}
618
619void CodeGenVTables::addRelativeComponent(ConstantArrayBuilder &builder,
620 llvm::Constant *component,
621 unsigned vtableAddressPoint,
622 bool vtableHasLocalLinkage,
623 bool isCompleteDtor) const {
624 // No need to get the offset of a nullptr.
625 if (component->isNullValue())
626 return builder.add(value: llvm::ConstantInt::get(Ty: CGM.Int32Ty, V: 0));
627
628 auto *globalVal =
629 cast<llvm::GlobalValue>(Val: component->stripPointerCastsAndAliases());
630 llvm::Module &module = CGM.getModule();
631
632 // We don't want to copy the linkage of the vtable exactly because we still
633 // want the stub/proxy to be emitted for properly calculating the offset.
634 // Examples where there would be no symbol emitted are available_externally
635 // and private linkages.
636 //
637 // `internal` linkage still emits a symbol (with STB_LOCAL ELF binding) that
638 // the offset can be computed against, unlike `private`.
639 //
640 // `linkonce_odr` linkage results in a non-local ELF binding but also allows
641 // the rtti_proxy to be transparently replaced with a GOTPCREL reloc by a
642 // target that supports this replacement.
643 auto stubLinkage = vtableHasLocalLinkage
644 ? llvm::GlobalValue::InternalLinkage
645 : llvm::GlobalValue::LinkOnceODRLinkage;
646
647 llvm::Constant *target;
648 if (auto *func = dyn_cast<llvm::Function>(Val: globalVal)) {
649 target = llvm::DSOLocalEquivalent::get(GV: func);
650 } else {
651 llvm::SmallString<16> rttiProxyName(globalVal->getName());
652 rttiProxyName.append(RHS: ".rtti_proxy");
653
654 // The RTTI component may not always be emitted in the same linkage unit as
655 // the vtable. As a general case, we can make a dso_local proxy to the RTTI
656 // that points to the actual RTTI struct somewhere. This will result in a
657 // GOTPCREL relocation when taking the relative offset to the proxy.
658 llvm::GlobalVariable *proxy = module.getNamedGlobal(Name: rttiProxyName);
659 if (!proxy) {
660 proxy = new llvm::GlobalVariable(module, globalVal->getType(),
661 /*isConstant=*/true, stubLinkage,
662 globalVal, rttiProxyName);
663 proxy->setDSOLocal(true);
664 proxy->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
665 if (!proxy->hasLocalLinkage()) {
666 proxy->setVisibility(llvm::GlobalValue::HiddenVisibility);
667 proxy->setComdat(module.getOrInsertComdat(Name: rttiProxyName));
668 }
669 // Do not instrument the rtti proxies with hwasan to avoid a duplicate
670 // symbol error. Aliases generated by hwasan will retain the same name, but
671 // the addresses they are set to may have different tags from different
672 // compilation units. We don't run into this without hwasan because the
673 // proxies are in comdat groups, but those aren't propagated to the alias.
674 RemoveHwasanMetadata(GV: proxy);
675 }
676 target = proxy;
677 }
678
679 builder.addRelativeOffsetToPosition(type: CGM.Int32Ty, target,
680 /*position=*/vtableAddressPoint);
681}
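
// Conceptually (a sketch, not the exact constant expression built above),
// each relative slot holds a signed 32-bit displacement measured from the
// vtable's address point:
//
//   slot = (int32_t)((intptr_t)target - (intptr_t)&vtable[vtableAddressPoint]);
//
// where "target" is either the dso_local_equivalent of the virtual function
// or the ".rtti_proxy" variable created above, so the displacement can be
// fixed at link time without a dynamic relocation.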
682
683static bool UseRelativeLayout(const CodeGenModule &CGM) {
684 return CGM.getTarget().getCXXABI().isItaniumFamily() &&
685 CGM.getItaniumVTableContext().isRelativeLayout();
686}
687
688bool CodeGenVTables::useRelativeLayout() const {
689 return UseRelativeLayout(CGM);
690}
691
692llvm::Type *CodeGenModule::getVTableComponentType() const {
693 if (UseRelativeLayout(CGM: *this))
694 return Int32Ty;
695 return GlobalsInt8PtrTy;
696}
697
698llvm::Type *CodeGenVTables::getVTableComponentType() const {
699 return CGM.getVTableComponentType();
700}
701
702static void AddPointerLayoutOffset(const CodeGenModule &CGM,
703 ConstantArrayBuilder &builder,
704 CharUnits offset) {
705 builder.add(value: llvm::ConstantExpr::getIntToPtr(
706 C: llvm::ConstantInt::get(Ty: CGM.PtrDiffTy, V: offset.getQuantity()),
707 Ty: CGM.GlobalsInt8PtrTy));
708}
709
710static void AddRelativeLayoutOffset(const CodeGenModule &CGM,
711 ConstantArrayBuilder &builder,
712 CharUnits offset) {
713 builder.add(value: llvm::ConstantInt::get(Ty: CGM.Int32Ty, V: offset.getQuantity()));
714}
715
716void CodeGenVTables::addVTableComponent(ConstantArrayBuilder &builder,
717 const VTableLayout &layout,
718 unsigned componentIndex,
719 llvm::Constant *rtti,
720 unsigned &nextVTableThunkIndex,
721 unsigned vtableAddressPoint,
722 bool vtableHasLocalLinkage) {
723 auto &component = layout.vtable_components()[componentIndex];
724
725 auto addOffsetConstant =
726 useRelativeLayout() ? AddRelativeLayoutOffset : AddPointerLayoutOffset;
727
728 switch (component.getKind()) {
729 case VTableComponent::CK_VCallOffset:
730 return addOffsetConstant(CGM, builder, component.getVCallOffset());
731
732 case VTableComponent::CK_VBaseOffset:
733 return addOffsetConstant(CGM, builder, component.getVBaseOffset());
734
735 case VTableComponent::CK_OffsetToTop:
736 return addOffsetConstant(CGM, builder, component.getOffsetToTop());
737
738 case VTableComponent::CK_RTTI:
739 if (useRelativeLayout())
740 return addRelativeComponent(builder, component: rtti, vtableAddressPoint,
741 vtableHasLocalLinkage,
742 /*isCompleteDtor=*/false);
743 else
744 return builder.add(value: rtti);
745
746 case VTableComponent::CK_FunctionPointer:
747 case VTableComponent::CK_CompleteDtorPointer:
748 case VTableComponent::CK_DeletingDtorPointer: {
749 GlobalDecl GD = component.getGlobalDecl();
750
751 if (CGM.getLangOpts().CUDA) {
752 // Emit NULL for methods we can't codegen on this
753 // side. Otherwise we'd end up with a vtable containing unresolved
754 // references.
755 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
756 // OK on device side: functions w/ __device__ attribute
757 // OK on host side: anything except __device__-only functions.
758 bool CanEmitMethod =
759 CGM.getLangOpts().CUDAIsDevice
760 ? MD->hasAttr<CUDADeviceAttr>()
761 : (MD->hasAttr<CUDAHostAttr>() || !MD->hasAttr<CUDADeviceAttr>());
762 if (!CanEmitMethod)
763 return builder.add(
764 value: llvm::ConstantExpr::getNullValue(Ty: CGM.GlobalsInt8PtrTy));
765 // Method is acceptable, continue processing as usual.
766 }
767
768 auto getSpecialVirtualFn = [&](StringRef name) -> llvm::Constant * {
769 // FIXME(PR43094): When merging comdat groups, lld can select a local
770 // symbol as the signature symbol even though it cannot be accessed
771 // outside that symbol's TU. The relative vtables ABI would make
772 // __cxa_pure_virtual and __cxa_deleted_virtual local symbols, and
773 // depending on link order, the comdat groups could resolve to the one
774 // with the local symbol. As a temporary solution, fill these components
775 // with zero. We shouldn't be calling these in the first place anyway.
776 if (useRelativeLayout())
777 return llvm::ConstantPointerNull::get(T: CGM.GlobalsInt8PtrTy);
778
779 // For NVPTX devices in OpenMP, emit these special functions as null
780 // pointers; otherwise linking ends up with unresolved references.
781 if (CGM.getLangOpts().OpenMP && CGM.getLangOpts().OpenMPIsTargetDevice &&
782 CGM.getTriple().isNVPTX())
783 return llvm::ConstantPointerNull::get(T: CGM.GlobalsInt8PtrTy);
784 llvm::FunctionType *fnTy =
785 llvm::FunctionType::get(Result: CGM.VoidTy, /*isVarArg=*/false);
786 llvm::Constant *fn = cast<llvm::Constant>(
787 Val: CGM.CreateRuntimeFunction(Ty: fnTy, Name: name).getCallee());
788 if (auto f = dyn_cast<llvm::Function>(Val: fn))
789 f->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
790 return fn;
791 };
792
793 llvm::Constant *fnPtr;
794
795 // Pure virtual member functions.
796 if (cast<CXXMethodDecl>(Val: GD.getDecl())->isPureVirtual()) {
797 if (!PureVirtualFn)
798 PureVirtualFn =
799 getSpecialVirtualFn(CGM.getCXXABI().GetPureVirtualCallName());
800 fnPtr = PureVirtualFn;
801
802 // Deleted virtual member functions.
803 } else if (cast<CXXMethodDecl>(Val: GD.getDecl())->isDeleted()) {
804 if (!DeletedVirtualFn)
805 DeletedVirtualFn =
806 getSpecialVirtualFn(CGM.getCXXABI().GetDeletedVirtualCallName());
807 fnPtr = DeletedVirtualFn;
808
809 // Thunks.
810 } else if (nextVTableThunkIndex < layout.vtable_thunks().size() &&
811 layout.vtable_thunks()[nextVTableThunkIndex].first ==
812 componentIndex) {
813 auto &thunkInfo = layout.vtable_thunks()[nextVTableThunkIndex].second;
814
815 nextVTableThunkIndex++;
816 fnPtr = maybeEmitThunk(GD, TI: thunkInfo, /*ForVTable=*/true);
817
818 // Otherwise we can use the method definition directly.
819 } else {
820 llvm::Type *fnTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
821 fnPtr = CGM.GetAddrOfFunction(GD, Ty: fnTy, /*ForVTable=*/true);
822 }
823
824 if (useRelativeLayout()) {
825 return addRelativeComponent(
826 builder, component: fnPtr, vtableAddressPoint, vtableHasLocalLinkage,
827 isCompleteDtor: component.getKind() == VTableComponent::CK_CompleteDtorPointer);
828 } else {
829 // TODO: this is icky and only exists due to functions being in the generic
830 // address space, rather than the global one, even though they are
831 // globals; fixing said issue might be intrusive, and will be done
832 // later.
833 unsigned FnAS = fnPtr->getType()->getPointerAddressSpace();
834 unsigned GVAS = CGM.GlobalsInt8PtrTy->getPointerAddressSpace();
835
836 if (FnAS != GVAS)
837 fnPtr =
838 llvm::ConstantExpr::getAddrSpaceCast(C: fnPtr, Ty: CGM.GlobalsInt8PtrTy);
839 return builder.add(value: fnPtr);
840 }
841 }
842
843 case VTableComponent::CK_UnusedFunctionPointer:
844 if (useRelativeLayout())
845 return builder.add(value: llvm::ConstantExpr::getNullValue(Ty: CGM.Int32Ty));
846 else
847 return builder.addNullPointer(ptrTy: CGM.GlobalsInt8PtrTy);
848 }
849
850 llvm_unreachable("Unexpected vtable component kind");
851}
852
853llvm::Type *CodeGenVTables::getVTableType(const VTableLayout &layout) {
854 SmallVector<llvm::Type *, 4> tys;
855 llvm::Type *componentType = getVTableComponentType();
856 for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i)
857 tys.push_back(Elt: llvm::ArrayType::get(ElementType: componentType, NumElements: layout.getVTableSize(i)));
858
859 return llvm::StructType::get(Context&: CGM.getLLVMContext(), Elements: tys);
860}
861
862void CodeGenVTables::createVTableInitializer(ConstantStructBuilder &builder,
863 const VTableLayout &layout,
864 llvm::Constant *rtti,
865 bool vtableHasLocalLinkage) {
866 llvm::Type *componentType = getVTableComponentType();
867
868 const auto &addressPoints = layout.getAddressPointIndices();
869 unsigned nextVTableThunkIndex = 0;
870 for (unsigned vtableIndex = 0, endIndex = layout.getNumVTables();
871 vtableIndex != endIndex; ++vtableIndex) {
872 auto vtableElem = builder.beginArray(eltTy: componentType);
873
874 size_t vtableStart = layout.getVTableOffset(i: vtableIndex);
875 size_t vtableEnd = vtableStart + layout.getVTableSize(i: vtableIndex);
876 for (size_t componentIndex = vtableStart; componentIndex < vtableEnd;
877 ++componentIndex) {
878 addVTableComponent(builder&: vtableElem, layout, componentIndex, rtti,
879 nextVTableThunkIndex, vtableAddressPoint: addressPoints[vtableIndex],
880 vtableHasLocalLinkage);
881 }
882 vtableElem.finishAndAddTo(parent&: builder);
883 }
884}
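
// The overall shape of the initializer built above, sketched as C-style data
// for a hypothetical class with a virtual base (struct B : virtual V):
//
//   struct {
//     Component b_vtable[N];  // primary vtable: vbase offsets, offset-to-top,
//                             // RTTI, then B's virtual functions
//     Component v_vtable[M];  // secondary vtable for the virtual base V
//   };
//
// where Component is a pointer in the classic ABI and an i32 offset in the
// relative ABI; getVTableType() produces exactly this struct-of-arrays type
// and the loop above fills each inner array component by component.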
885
886llvm::GlobalVariable *CodeGenVTables::GenerateConstructionVTable(
887 const CXXRecordDecl *RD, const BaseSubobject &Base, bool BaseIsVirtual,
888 llvm::GlobalVariable::LinkageTypes Linkage,
889 VTableAddressPointsMapTy &AddressPoints) {
890 if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
891 DI->completeClassData(Base.getBase());
892
893 std::unique_ptr<VTableLayout> VTLayout(
894 getItaniumVTableContext().createConstructionVTableLayout(
895 MostDerivedClass: Base.getBase(), MostDerivedClassOffset: Base.getBaseOffset(), MostDerivedClassIsVirtual: BaseIsVirtual, LayoutClass: RD));
896
897 // Add the address points.
898 AddressPoints = VTLayout->getAddressPoints();
899
900 // Get the mangled construction vtable name.
901 SmallString<256> OutName;
902 llvm::raw_svector_ostream Out(OutName);
903 cast<ItaniumMangleContext>(Val&: CGM.getCXXABI().getMangleContext())
904 .mangleCXXCtorVTable(RD, Offset: Base.getBaseOffset().getQuantity(),
905 Type: Base.getBase(), Out);
906 SmallString<256> Name(OutName);
907
908 bool UsingRelativeLayout = getItaniumVTableContext().isRelativeLayout();
909 bool VTableAliasExists =
910 UsingRelativeLayout && CGM.getModule().getNamedAlias(Name);
911 if (VTableAliasExists) {
912 // We previously made the vtable hidden and changed its name.
913 Name.append(RHS: ".local");
914 }
915
916 llvm::Type *VTType = getVTableType(layout: *VTLayout);
917
918 // Construction vtable symbols are not part of the Itanium ABI, so we cannot
919 // guarantee that they actually will be available externally. Instead, when
920 // emitting an available_externally VTT, we provide references to an internal
921 // linkage construction vtable. The ABI only requires complete-object vtables
922 // to be the same for all instances of a type, not construction vtables.
923 if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
924 Linkage = llvm::GlobalVariable::InternalLinkage;
925
926 llvm::Align Align = CGM.getDataLayout().getABITypeAlign(Ty: VTType);
927
928 // Create the variable that will hold the construction vtable.
929 llvm::GlobalVariable *VTable =
930 CGM.CreateOrReplaceCXXRuntimeVariable(Name, Ty: VTType, Linkage, Alignment: Align);
931
932 // V-tables are always unnamed_addr.
933 VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
934
935 llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(
936 Ty: CGM.getContext().getTagDeclType(Base.getBase()));
937
938 // Create and set the initializer.
939 ConstantInitBuilder builder(CGM);
940 auto components = builder.beginStruct();
941 createVTableInitializer(builder&: components, layout: *VTLayout, rtti: RTTI,
942 vtableHasLocalLinkage: VTable->hasLocalLinkage());
943 components.finishAndSetAsInitializer(global: VTable);
944
945 // Set properties only after the initializer has been set to ensure that the
946 // GV is treated as definition and not declaration.
947 assert(!VTable->isDeclaration() && "Shouldn't set properties on declaration");
948 CGM.setGVProperties(VTable, RD);
949
950 CGM.EmitVTableTypeMetadata(RD, VTable, VTLayout: *VTLayout.get());
951
952 if (UsingRelativeLayout) {
953 RemoveHwasanMetadata(GV: VTable);
954 if (!VTable->isDSOLocal())
955 GenerateRelativeVTableAlias(VTable, AliasNameRef: OutName);
956 }
957
958 return VTable;
959}
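
// Background sketch (hypothetical classes): construction vtables are needed
// when a base class with virtual bases is being constructed or destroyed as
// part of a more derived object, e.g.
//
//   struct V { virtual void f(); };
//   struct A : virtual V { A(); };
//   struct B : A { B(); };
//
// While B's constructor runs A's constructor, the object must behave as an A,
// yet the virtual base V sits wherever B's layout placed it. The VTT for B
// therefore supplies the A-in-B construction vtable generated above, whose
// vbase and vcall offsets follow B's layout while its virtual functions are
// A's.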
960
961// Ensure this vtable is not instrumented by hwasan. That is, a global alias is
962// not generated for it. This is mainly used by the relative-vtables ABI where
963// vtables instead contain 32-bit offsets between the vtable and function
964// pointers. Hwasan is disabled for these vtables for now because the tag in a
965// vtable pointer may fail the overflow check when resolving 32-bit PLT
966// relocations. A future alternative for this would be finding which usages of
967// the vtable can continue to use the untagged hwasan value without any loss of
968// value in hwasan.
969void CodeGenVTables::RemoveHwasanMetadata(llvm::GlobalValue *GV) const {
970 if (CGM.getLangOpts().Sanitize.has(K: SanitizerKind::HWAddress)) {
971 llvm::GlobalValue::SanitizerMetadata Meta;
972 if (GV->hasSanitizerMetadata())
973 Meta = GV->getSanitizerMetadata();
974 Meta.NoHWAddress = true;
975 GV->setSanitizerMetadata(Meta);
976 }
977}
978
979 // If the VTable is not dso_local, then we will not be able to mark it as
980 // needing no relocations and cannot move it into rodata. This frequently
981 // happens for classes that must be exported from a DSO (as in libc++). For
982 // cases like these, we can make the vtable hidden or private and create a
983 // public alias with the same visibility and linkage as the original vtable
984 // type.
985void CodeGenVTables::GenerateRelativeVTableAlias(llvm::GlobalVariable *VTable,
986 llvm::StringRef AliasNameRef) {
987 assert(getItaniumVTableContext().isRelativeLayout() &&
988 "Can only use this if the relative vtable ABI is used");
989 assert(!VTable->isDSOLocal() && "This should be called only if the vtable is "
990 "not guaranteed to be dso_local");
991
992 // If the vtable is available_externally, we don't need to (and shouldn't)
993 // generate an alias for it in the first place, since the vtable won't
994 // actually be emitted in this compilation unit.
995 if (VTable->hasAvailableExternallyLinkage())
996 return;
997
998 // Create a new string in the event the alias is already the name of the
999 // vtable. Using the reference directly could lead to use of an uninitialized
1000 // value in the module's StringMap.
1001 llvm::SmallString<256> AliasName(AliasNameRef);
1002 VTable->setName(AliasName + ".local");
1003
1004 auto Linkage = VTable->getLinkage();
1005 assert(llvm::GlobalAlias::isValidLinkage(Linkage) &&
1006 "Invalid vtable alias linkage");
1007
1008 llvm::GlobalAlias *VTableAlias = CGM.getModule().getNamedAlias(Name: AliasName);
1009 if (!VTableAlias) {
1010 VTableAlias = llvm::GlobalAlias::create(Ty: VTable->getValueType(),
1011 AddressSpace: VTable->getAddressSpace(), Linkage,
1012 Name: AliasName, Parent: &CGM.getModule());
1013 } else {
1014 assert(VTableAlias->getValueType() == VTable->getValueType());
1015 assert(VTableAlias->getLinkage() == Linkage);
1016 }
1017 VTableAlias->setVisibility(VTable->getVisibility());
1018 VTableAlias->setUnnamedAddr(VTable->getUnnamedAddr());
1019
1020 // Both of these imply dso_local for the vtable.
1021 if (!VTable->hasComdat()) {
1022 // If this is in a comdat, then we shouldn't make the linkage private due to
1023 // an issue in lld where private symbols can be used as the key symbol when
1024 // choosing the prevalent group. This leads to "relocation refers to a
1025 // symbol in a discarded section".
1026 VTable->setLinkage(llvm::GlobalValue::PrivateLinkage);
1027 } else {
1028 // We should at least make this hidden since we don't want to expose it.
1029 VTable->setVisibility(llvm::GlobalValue::HiddenVisibility);
1030 }
1031
1032 VTableAlias->setAliasee(VTable);
1033}
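
// The net effect, roughly (symbol names are illustrative, for a class S):
//
//   _ZTV1S.local   hidden or private, local to this DSO, holds the relative
//                  offsets
//   _ZTV1S         alias with the original linkage and visibility, pointing
//                  at _ZTV1S.local
//
// External references keep using the ABI name, while the relative offsets are
// computed against a symbol that is known to be defined in this DSO and thus
// needs no dynamic relocation.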
1034
1035static bool shouldEmitAvailableExternallyVTable(const CodeGenModule &CGM,
1036 const CXXRecordDecl *RD) {
1037 return CGM.getCodeGenOpts().OptimizationLevel > 0 &&
1038 CGM.getCXXABI().canSpeculativelyEmitVTable(RD);
1039}
1040
1041/// Compute the required linkage of the vtable for the given class.
1042///
1043/// Note that we only call this at the end of the translation unit.
1044llvm::GlobalVariable::LinkageTypes
1045CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
1046 if (!RD->isExternallyVisible())
1047 return llvm::GlobalVariable::InternalLinkage;
1048
1049 // We're at the end of the translation unit, so the current key
1050 // function is fully correct.
1051 const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD);
1052 if (keyFunction && !RD->hasAttr<DLLImportAttr>()) {
1053 // If this class has a key function, use that to determine the
1054 // linkage of the vtable.
1055 const FunctionDecl *def = nullptr;
1056 if (keyFunction->hasBody(def))
1057 keyFunction = cast<CXXMethodDecl>(Val: def);
1058
1059 switch (keyFunction->getTemplateSpecializationKind()) {
1060 case TSK_Undeclared:
1061 case TSK_ExplicitSpecialization:
1062 assert(
1063 (def || CodeGenOpts.OptimizationLevel > 0 ||
1064 CodeGenOpts.getDebugInfo() != llvm::codegenoptions::NoDebugInfo) &&
1065 "Shouldn't query vtable linkage without key function, "
1066 "optimizations, or debug info");
1067 if (!def && CodeGenOpts.OptimizationLevel > 0)
1068 return llvm::GlobalVariable::AvailableExternallyLinkage;
1069
1070 if (keyFunction->isInlined())
1071 return !Context.getLangOpts().AppleKext
1072 ? llvm::GlobalVariable::LinkOnceODRLinkage
1073 : llvm::Function::InternalLinkage;
1074
1075 return llvm::GlobalVariable::ExternalLinkage;
1076
1077 case TSK_ImplicitInstantiation:
1078 return !Context.getLangOpts().AppleKext ?
1079 llvm::GlobalVariable::LinkOnceODRLinkage :
1080 llvm::Function::InternalLinkage;
1081
1082 case TSK_ExplicitInstantiationDefinition:
1083 return !Context.getLangOpts().AppleKext ?
1084 llvm::GlobalVariable::WeakODRLinkage :
1085 llvm::Function::InternalLinkage;
1086
1087 case TSK_ExplicitInstantiationDeclaration:
1088 llvm_unreachable("Should not have been asked to emit this");
1089 }
1090 }
1091
1092 // -fapple-kext mode does not support weak linkage, so we must use
1093 // internal linkage.
1094 if (Context.getLangOpts().AppleKext)
1095 return llvm::Function::InternalLinkage;
1096
1097 llvm::GlobalVariable::LinkageTypes DiscardableODRLinkage =
1098 llvm::GlobalValue::LinkOnceODRLinkage;
1099 llvm::GlobalVariable::LinkageTypes NonDiscardableODRLinkage =
1100 llvm::GlobalValue::WeakODRLinkage;
1101 if (RD->hasAttr<DLLExportAttr>()) {
1102 // Cannot discard exported vtables.
1103 DiscardableODRLinkage = NonDiscardableODRLinkage;
1104 } else if (RD->hasAttr<DLLImportAttr>()) {
1105 // Imported vtables are available externally.
1106 DiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
1107 NonDiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
1108 }
1109
1110 switch (RD->getTemplateSpecializationKind()) {
1111 case TSK_Undeclared:
1112 case TSK_ExplicitSpecialization:
1113 case TSK_ImplicitInstantiation:
1114 return DiscardableODRLinkage;
1115
1116 case TSK_ExplicitInstantiationDeclaration:
1117 // Explicit instantiations in MSVC do not provide vtables, so we must emit
1118 // our own.
1119 if (getTarget().getCXXABI().isMicrosoft())
1120 return DiscardableODRLinkage;
1121 return shouldEmitAvailableExternallyVTable(CGM: *this, RD)
1122 ? llvm::GlobalVariable::AvailableExternallyLinkage
1123 : llvm::GlobalVariable::ExternalLinkage;
1124
1125 case TSK_ExplicitInstantiationDefinition:
1126 return NonDiscardableODRLinkage;
1127 }
1128
1129 llvm_unreachable("Invalid TemplateSpecializationKind!");
1130}
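
// A small worked example (hypothetical class) of the key-function rules
// above:
//
//   struct S {
//     virtual void key();      // non-inline, non-pure: the key function
//     virtual void other() {}
//   };
//
// The translation unit that defines S::key emits S's vtable with external
// linkage; other TUs normally just reference it, though under optimization
// they may emit an available_externally copy for the optimizer's benefit.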
1131
1132/// This is a callback from Sema to tell us that a particular vtable is
1133/// required to be emitted in this translation unit.
1134///
1135/// This is only called for vtables that _must_ be emitted (mainly due to key
1136/// functions). For weak vtables, CodeGen tracks when they are needed and
1137/// emits them as-needed.
1138void CodeGenModule::EmitVTable(CXXRecordDecl *theClass) {
1139 VTables.GenerateClassData(RD: theClass);
1140}
1141
1142void
1143CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
1144 if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
1145 DI->completeClassData(RD);
1146
1147 if (RD->getNumVBases())
1148 CGM.getCXXABI().emitVirtualInheritanceTables(RD);
1149
1150 CGM.getCXXABI().emitVTableDefinitions(CGVT&: *this, RD);
1151}
1152
1153 /// At this point in the translation unit, does it appear that we can
1154/// rely on the vtable being defined elsewhere in the program?
1155///
1156/// The response is really only definitive when called at the end of
1157/// the translation unit.
1158///
1159/// The only semantic restriction here is that the object file should
1160/// not contain a vtable definition when that vtable is defined
1161/// strongly elsewhere. Otherwise, we'd just like to avoid emitting
1162/// vtables when unnecessary.
1163bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
1164 assert(RD->isDynamicClass() && "Non-dynamic classes have no VTable.");
1165
1166 // We always synthesize vtables if they are needed in the MS ABI. MSVC doesn't
1167 // emit them even if there is an explicit template instantiation.
1168 if (CGM.getTarget().getCXXABI().isMicrosoft())
1169 return false;
1170
1171 // If we have an explicit instantiation declaration (and not a
1172 // definition), the vtable is defined elsewhere.
1173 TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
1174 if (TSK == TSK_ExplicitInstantiationDeclaration)
1175 return true;
1176
1177 // Otherwise, if the class is an instantiated template, the
1178 // vtable must be defined here.
1179 if (TSK == TSK_ImplicitInstantiation ||
1180 TSK == TSK_ExplicitInstantiationDefinition)
1181 return false;
1182
1183 // Otherwise, if the class doesn't have a key function (possibly
1184 // anymore), the vtable must be defined here.
1185 const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
1186 if (!keyFunction)
1187 return false;
1188
1189 const FunctionDecl *Def;
1190 // Otherwise, if we don't have a definition of the key function, the
1191 // vtable must be defined somewhere else.
1192 if (!keyFunction->hasBody(Def))
1193 return true;
1194
1195 assert(Def && "The body of the key function is not assigned to Def?");
1196 // If the non-inline key function comes from another module unit, the vtable
1197 // must be defined there.
1198 return Def->isInAnotherModuleUnit() && !Def->isInlineSpecified();
1199}
1200
1201/// Given that we're currently at the end of the translation unit, and
1202/// we've emitted a reference to the vtable for this class, should
1203/// we define that vtable?
1204static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
1205 const CXXRecordDecl *RD) {
1206 // If vtable is internal then it has to be done.
1207 if (!CGM.getVTables().isVTableExternal(RD))
1208 return true;
1209
1210 // If it's external then maybe we will need it as available_externally.
1211 return shouldEmitAvailableExternallyVTable(CGM, RD);
1212}
1213
1214/// Given that at some point we emitted a reference to one or more
1215/// vtables, and that we are now at the end of the translation unit,
1216/// decide whether we should emit them.
1217void CodeGenModule::EmitDeferredVTables() {
1218#ifndef NDEBUG
1219 // Remember the size of DeferredVTables, because we're going to assume
1220 // that this entire operation doesn't modify it.
1221 size_t savedSize = DeferredVTables.size();
1222#endif
1223
1224 for (const CXXRecordDecl *RD : DeferredVTables)
1225 if (shouldEmitVTableAtEndOfTranslationUnit(CGM&: *this, RD))
1226 VTables.GenerateClassData(RD);
1227 else if (shouldOpportunisticallyEmitVTables())
1228 OpportunisticVTables.push_back(x: RD);
1229
1230 assert(savedSize == DeferredVTables.size() &&
1231 "deferred extra vtables during vtable emission?");
1232 DeferredVTables.clear();
1233}
1234
1235bool CodeGenModule::AlwaysHasLTOVisibilityPublic(const CXXRecordDecl *RD) {
1236 if (RD->hasAttr<LTOVisibilityPublicAttr>() || RD->hasAttr<UuidAttr>() ||
1237 RD->hasAttr<DLLExportAttr>() || RD->hasAttr<DLLImportAttr>())
1238 return true;
1239
1240 if (!getCodeGenOpts().LTOVisibilityPublicStd)
1241 return false;
1242
1243 const DeclContext *DC = RD;
1244 while (true) {
1245 auto *D = cast<Decl>(Val: DC);
1246 DC = DC->getParent();
1247 if (isa<TranslationUnitDecl>(Val: DC->getRedeclContext())) {
1248 if (auto *ND = dyn_cast<NamespaceDecl>(D))
1249 if (const IdentifierInfo *II = ND->getIdentifier())
1250 if (II->isStr(Str: "std") || II->isStr(Str: "stdext"))
1251 return true;
1252 break;
1253 }
1254 }
1255
1256 return false;
1257}
1258
1259bool CodeGenModule::HasHiddenLTOVisibility(const CXXRecordDecl *RD) {
1260 LinkageInfo LV = RD->getLinkageAndVisibility();
1261 if (!isExternallyVisible(L: LV.getLinkage()))
1262 return true;
1263
1264 if (!getTriple().isOSBinFormatCOFF() &&
1265 LV.getVisibility() != HiddenVisibility)
1266 return false;
1267
1268 return !AlwaysHasLTOVisibilityPublic(RD);
1269}
1270
1271llvm::GlobalObject::VCallVisibility CodeGenModule::GetVCallVisibilityLevel(
1272 const CXXRecordDecl *RD, llvm::DenseSet<const CXXRecordDecl *> &Visited) {
1273 // If we have already visited this RD (which means this is a recursive call
1274 // since the initial call should have an empty Visited set), return the max
1275 // visibility. The recursive calls below compute the min between the result
1276 // of the recursive call and the current TypeVis, so returning the max here
1277 // ensures that it will have no effect on the current TypeVis.
1278 if (!Visited.insert(V: RD).second)
1279 return llvm::GlobalObject::VCallVisibilityTranslationUnit;
1280
1281 LinkageInfo LV = RD->getLinkageAndVisibility();
1282 llvm::GlobalObject::VCallVisibility TypeVis;
1283 if (!isExternallyVisible(L: LV.getLinkage()))
1284 TypeVis = llvm::GlobalObject::VCallVisibilityTranslationUnit;
1285 else if (HasHiddenLTOVisibility(RD))
1286 TypeVis = llvm::GlobalObject::VCallVisibilityLinkageUnit;
1287 else
1288 TypeVis = llvm::GlobalObject::VCallVisibilityPublic;
1289
1290 for (const auto &B : RD->bases())
1291 if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
1292 TypeVis = std::min(
1293 a: TypeVis,
1294 b: GetVCallVisibilityLevel(RD: B.getType()->getAsCXXRecordDecl(), Visited));
1295
1296 for (const auto &B : RD->vbases())
1297 if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
1298 TypeVis = std::min(
1299 a: TypeVis,
1300 b: GetVCallVisibilityLevel(RD: B.getType()->getAsCXXRecordDecl(), Visited));
1301
1302 return TypeVis;
1303}
1304
1305void CodeGenModule::EmitVTableTypeMetadata(const CXXRecordDecl *RD,
1306 llvm::GlobalVariable *VTable,
1307 const VTableLayout &VTLayout) {
1308 // Emit type metadata on vtables with LTO or IR instrumentation.
1309 // In IR instrumentation, the type metadata is used to identify vtable
1310 // definitions (for type profiling) among all global variables.
1311 if (!getCodeGenOpts().LTOUnit && !getCodeGenOpts().hasProfileIRInstr())
1312 return;
1313
1314 CharUnits ComponentWidth = GetTargetTypeStoreSize(Ty: getVTableComponentType());
1315
1316 struct AddressPoint {
1317 const CXXRecordDecl *Base;
1318 size_t Offset;
1319 std::string TypeName;
1320 bool operator<(const AddressPoint &RHS) const {
1321 int D = TypeName.compare(str: RHS.TypeName);
1322 return D < 0 || (D == 0 && Offset < RHS.Offset);
1323 }
1324 };
1325 std::vector<AddressPoint> AddressPoints;
1326 for (auto &&AP : VTLayout.getAddressPoints()) {
1327 AddressPoint N{.Base: AP.first.getBase(),
1328 .Offset: VTLayout.getVTableOffset(i: AP.second.VTableIndex) +
1329 AP.second.AddressPointIndex,
1330 .TypeName: {}};
1331 llvm::raw_string_ostream Stream(N.TypeName);
1332 getCXXABI().getMangleContext().mangleCanonicalTypeName(
1333 T: QualType(N.Base->getTypeForDecl(), 0), Stream);
1334 AddressPoints.push_back(x: std::move(N));
1335 }
1336
1337 // Sort the address points for determinism.
1338 llvm::sort(C&: AddressPoints);
1339
1340 ArrayRef<VTableComponent> Comps = VTLayout.vtable_components();
1341 for (auto AP : AddressPoints) {
1342 // Create type metadata for the address point.
1343 AddVTableTypeMetadata(VTable, Offset: ComponentWidth * AP.Offset, RD: AP.Base);
1344
1345 // The class associated with each address point could also potentially be
1346 // used for indirect calls via a member function pointer, so we need to
1347 // annotate the address of each function pointer with the appropriate member
1348 // function pointer type.
1349 for (unsigned I = 0; I != Comps.size(); ++I) {
1350 if (Comps[I].getKind() != VTableComponent::CK_FunctionPointer)
1351 continue;
1352 llvm::Metadata *MD = CreateMetadataIdentifierForVirtualMemPtrType(
1353 T: Context.getMemberPointerType(
1354 T: Comps[I].getFunctionDecl()->getType(),
1355 Cls: Context.getRecordType(AP.Base).getTypePtr()));
1356 VTable->addTypeMetadata(Offset: (ComponentWidth * I).getQuantity(), TypeID: MD);
1357 }
1358 }
1359
1360 if (getCodeGenOpts().VirtualFunctionElimination ||
1361 getCodeGenOpts().WholeProgramVTables) {
1362 llvm::DenseSet<const CXXRecordDecl *> Visited;
1363 llvm::GlobalObject::VCallVisibility TypeVis =
1364 GetVCallVisibilityLevel(RD, Visited);
1365 if (TypeVis != llvm::GlobalObject::VCallVisibilityPublic)
1366 VTable->setVCallVisibilityMetadata(TypeVis);
1367 }
1368}
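
// Rough shape of the metadata emitted above (Itanium, 64-bit, hypothetical
// class A with one virtual function):
//
//   @_ZTV1A = ..., !type !0
//   !0 = !{i64 16, !"_ZTS1A"}
//
// meaning "16 bytes into this global is an address point valid for static
// type A". Whole-program devirtualization and CFI consume these records, and
// the per-slot member-function-pointer metadata added in the loop above
// serves indirect calls through pointers to members.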
1369
