//===--- PatternInit.cpp - Pattern Initialization -------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "PatternInit.h"
#include "CodeGenModule.h"
#include "clang/Basic/TargetInfo.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Type.h"

llvm::Constant *clang::CodeGen::initializationPatternFor(CodeGenModule &CGM,
                                                          llvm::Type *Ty) {
  // The following value is a guaranteed unmappable pointer value and has a
  // repeated byte-pattern which makes it easier to synthesize. We use it for
  // pointers as well as integers so that aggregates are likely to be
  // initialized with this repeated value.
  // For 32-bit platforms it's a bit trickier because, across systems, only the
  // zero page can reasonably be expected to be unmapped. We use the maximum
  // value 0xFFFFFFFF, assuming that a memory access at that address will wrap
  // around into the zero page.
  const uint64_t IntValue =
      CGM.getContext().getTargetInfo().getMaxPointerWidth() < 64
          ? 0xFFFFFFFFFFFFFFFFull
          : 0xAAAAAAAAAAAAAAAAull;
  // Floating-point values are initialized as NaNs because they propagate.
  // Using a repeated byte pattern means that it will be easier to initialize
  // all-floating-point aggregates and arrays with memset. Further, aggregates
  // which mix integral and a few floats might also initialize with memset
  // followed by a handful of stores for the floats. Using fairly unique NaNs
  // also means they'll be easier to distinguish in a crash.
  constexpr bool NegativeNaN = true;
  constexpr uint64_t NaNPayload = 0xFFFFFFFFFFFFFFFFull;
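  // Dispatch on the LLVM type: integer, pointer, and floating-point scalars
  // (and their vectors) get the repeated pattern directly; arrays and structs
  // recurse element by element.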
  if (Ty->isIntOrIntVectorTy()) {
    unsigned BitWidth =
        cast<llvm::IntegerType>(Ty->getScalarType())->getBitWidth();
    if (BitWidth <= 64)
      return llvm::ConstantInt::get(Ty, IntValue);
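    // For wider integers, replicate the 64-bit byte pattern until it fills the
    // full bit width.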
    return llvm::ConstantInt::get(
        Ty, llvm::APInt::getSplat(BitWidth, llvm::APInt(64, IntValue)));
  }
  if (Ty->isPtrOrPtrVectorTy()) {
    auto *PtrTy = cast<llvm::PointerType>(Ty->getScalarType());
    unsigned PtrWidth =
        CGM.getDataLayout().getPointerSizeInBits(PtrTy->getAddressSpace());
    if (PtrWidth > 64)
      llvm_unreachable("pattern initialization of unsupported pointer width");
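    // Pointer constants can't carry an arbitrary bit pattern directly, so
    // materialize the pattern as a pointer-sized integer and inttoptr-cast it.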
    llvm::Type *IntTy = llvm::IntegerType::get(CGM.getLLVMContext(), PtrWidth);
    auto *Int = llvm::ConstantInt::get(IntTy, IntValue);
    return llvm::ConstantExpr::getIntToPtr(Int, PtrTy);
  }
  if (Ty->isFPOrFPVectorTy()) {
    unsigned BitWidth = llvm::APFloat::semanticsSizeInBits(
        Ty->getScalarType()->getFltSemantics());
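    // Fill the NaN's significand from the repeated-byte payload: splat it out
    // for types wider than 64 bits; for narrower types, getQNaN uses only the
    // low payload bits that fit in the significand.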
    llvm::APInt Payload(64, NaNPayload);
    if (BitWidth >= 64)
      Payload = llvm::APInt::getSplat(BitWidth, Payload);
    return llvm::ConstantFP::getQNaN(Ty, NegativeNaN, &Payload);
  }
  if (Ty->isArrayTy()) {
    // Note: this doesn't touch tail padding (at the end of an object, before
    // the next array object). It is instead handled by replaceUndef.
    auto *ArrTy = cast<llvm::ArrayType>(Ty);
    llvm::SmallVector<llvm::Constant *, 8> Element(
        ArrTy->getNumElements(),
        initializationPatternFor(CGM, ArrTy->getElementType()));
    return llvm::ConstantArray::get(ArrTy, Element);
  }

  // Note: this doesn't touch struct padding. It will initialize as much union
  // padding as is required for the largest type in the union. Padding is
  // instead handled by replaceUndef. Stores to structs with volatile members
  // don't have a volatile qualifier when initialized according to C++. This is
  // fine because stack-based volatiles don't really have volatile semantics
  // anyway, and the initialization shouldn't be observable.
  auto *StructTy = cast<llvm::StructType>(Ty);
  llvm::SmallVector<llvm::Constant *, 8> Struct(StructTy->getNumElements());
  for (unsigned El = 0; El != Struct.size(); ++El)
    Struct[El] = initializationPatternFor(CGM, StructTy->getElementType(El));
  return llvm::ConstantStruct::get(StructTy, Struct);
}