// getFullSchema(): the MIBEntryDef X-macro appends every Meta field to the schema list.
#define MIBEntryDef(NameTag, Name, Type) List.push_back(Meta::Name);

// getHotColdSchema(): only the fields needed for hot/cold hinting.
  return {Meta::AllocCount, Meta::TotalSize, Meta::TotalLifetime,
          Meta::TotalLifetimeAccessDensity};
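The push_back line above is the usual LLVM X-macro trick: each MIBEntryDef entry in the shared .inc file expands into an append of the matching Meta value. A minimal sketch of how getFullSchema() is typically assembled, assuming the standard MIBEntryDef.inc include (the actual function body is not part of this excerpt):

// Sketch only; assumes the MIBEntryDef.inc X-macro header.
MemProfSchema getFullSchema() {
  MemProfSchema List;
#define MIBEntryDef(NameTag, Name, Type) List.push_back(Meta::Name);
#include "llvm/ProfileData/MIBEntryDef.inc"
#undef MIBEntryDef
  return List;
}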
// IndexedMemProfRecord serialized-size helpers: accumulate the size of each
// allocation site for Version2/Version3/Version4, then walk the call sites.
  Result += N.serializedSize(Schema, Version2);
  ...
  Result += N.serializedSize(Schema, Version3);
  ...
  Result += N.serializedSize(Schema, Version4);
  ...
  for (const auto &CS : Record.CallSites)
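Given the serializedSize member listed among the referenced declarations below, a writer can size its buffer before emitting anything. A sketch, with Record standing in for an already-populated IndexedMemProfRecord (hypothetical name):

// Sketch: pre-compute the number of bytes the record will occupy on disk.
size_t Bytes = Record.serializedSize(llvm::memprof::getFullSchema(),
                                     llvm::memprof::Version4);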
// serializeV2 / serializeV3: write each allocation's MemInfoBlock, then walk
// the record's call sites.
  N.Info.serialize(Schema, OS);
  ...
  for (const auto &CS : Record.CallSites)
  ...
  N.Info.serialize(Schema, OS);
  ...
  for (const auto &CS : Record.CallSites) {
  ...
// serializeV4 additionally writes the callee GUIDs attached to each call site.
  N.Info.serialize(Schema, OS);
  ...
  for (const auto &CS : Record.CallSites) {
    ...
    LE.write<uint64_t>(CS.CalleeGuids.size());
    for (const auto &Guid : CS.CalleeGuids)
  ...
// IndexedMemProfRecord::serialize dispatches to the per-version helpers.
  serializeV3(*this, Schema, OS, *MemProfCallStackIndexes);
  ...
  serializeV4(*this, Schema, OS, *MemProfCallStackIndexes);
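The dispatch above takes the version and an optional call-stack index map, matching the serialize signature in the referenced declarations below. A sketch of a caller, assuming the names writeRecordSketch, Record, Indexes, and Bytes, which are illustrative only:

#include <string>
#include "llvm/ADT/DenseMap.h"
#include "llvm/ProfileData/MemProf.h"
#include "llvm/Support/raw_ostream.h"

// Sketch: serialize one record in the Version4 layout. The index map is
// normally populated by the profile writer; it is left empty here for brevity.
void writeRecordSketch(const llvm::memprof::IndexedMemProfRecord &Record,
                       std::string &Bytes) {
  llvm::raw_string_ostream OS(Bytes);
  llvm::memprof::MemProfSchema Schema = llvm::memprof::getFullSchema();
  llvm::DenseMap<llvm::memprof::CallStackId, llvm::memprof::LinearCallStackId>
      Indexes; // hypothetical; a real writer fills this in first
  Record.serialize(Schema, OS, llvm::memprof::Version4, &Indexes);
}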
// deserializeV2: rebuild an IndexedMemProfRecord from the Version2 layout.
static IndexedMemProfRecord deserializeV2(const MemProfSchema &Schema,
                                          const unsigned char *Ptr) {
  ...
  Record.AllocSites.reserve(NumNodes);
  ...
    Node.Info.deserialize(Schema, Ptr);
  ...
  Record.CallSites.reserve(NumCtxs);
  for (uint64_t J = 0; J < NumCtxs; J++) {
    ...
    Record.CallSites.emplace_back(CSId);
// deserializeV3: same shape as V2, but the read pointer is advanced by the
// precomputed serialized size of each MemInfoBlock.
static IndexedMemProfRecord deserializeV3(const MemProfSchema &Schema,
                                          const unsigned char *Ptr) {
  ...
  Record.AllocSites.reserve(NumNodes);
  ...
    Node.Info.deserialize(Schema, Ptr);
    Ptr += SerializedSize;
  ...
  Record.CallSites.reserve(NumCtxs);
  for (uint64_t J = 0; J < NumCtxs; J++) {
    ...
    Record.CallSites.emplace_back(CSId);
// deserializeV4: as V3, plus each call site carries a list of callee GUIDs.
static IndexedMemProfRecord deserializeV4(const MemProfSchema &Schema,
                                          const unsigned char *Ptr) {
  ...
  Record.AllocSites.reserve(NumNodes);
  ...
    Node.Info.deserialize(Schema, Ptr);
    Ptr += SerializedSize;
  ...
  Record.CallSites.reserve(NumCtxs);
  for (uint64_t J = 0; J < NumCtxs; J++) {
    ...
    for (uint64_t K = 0; K < NumGuids; ++K)
      ...
    Record.CallSites.emplace_back(CSId, std::move(Guids));
// IndexedMemProfRecord::deserialize dispatches to the per-version helpers above.
IndexedMemProfRecord IndexedMemProfRecord::deserialize(
    const MemProfSchema &Schema, const unsigned char *Ptr, IndexedVersion Version) {
  ...
// toMemProfRecord: materialize frames for each allocation and call site via Callback.
    AI.Info = IndexedAI.Info;
    ...
    Record.AllocSites.push_back(std::move(AI));
  ...
    std::vector<Frame> Frames = Callback(CS.CSId);
    Record.CallSites.emplace_back(std::move(Frames), CS.CalleeGuids);
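Reading goes the other way: deserialize() picks the per-version decoder, and toMemProfRecord() swaps call-stack ids for frames through a caller-supplied lookup. A sketch under the signatures in the referenced declarations below; decodeSketch and the CallStacks table are illustrative, not part of the file:

#include <vector>
#include "llvm/ADT/DenseMap.h"
#include "llvm/ProfileData/MemProf.h"

// Sketch: decode a Version4 record and resolve its call-stack ids to frames.
llvm::memprof::MemProfRecord
decodeSketch(const llvm::memprof::MemProfSchema &Schema,
             const unsigned char *Buffer,
             const llvm::DenseMap<llvm::memprof::CallStackId,
                                  std::vector<llvm::memprof::Frame>> &CallStacks) {
  llvm::memprof::IndexedMemProfRecord Indexed =
      llvm::memprof::IndexedMemProfRecord::deserialize(Schema, Buffer,
                                                       llvm::memprof::Version4);
  // The callback must return the frames for a given CallStackId; here it is a
  // plain table lookup (empty vector if the id is unknown).
  return Indexed.toMemProfRecord(
      [&](llvm::memprof::CallStackId CSId) { return CallStacks.lookup(CSId); });
}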
// readMemProfSchema: validate the entry count and each tag before translating
// the tags into Meta values.
  const unsigned char *Ptr = Buffer;
  ...
  if (NumSchemaIds > static_cast<uint64_t>(Meta::Size)) {
    ...
                      "memprof schema invalid");
  ...
  for (size_t I = 0; I < NumSchemaIds; I++) {
    ...
                      "memprof schema invalid");
    Result.push_back(static_cast<Meta>(Tag));
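readMemProfSchema returns Expected<MemProfSchema>, so both "memprof schema invalid" branches above surface to the caller as an Error that must be consumed. A sketch of the calling convention, with readSchemaSketch as an illustrative wrapper:

#include "llvm/ProfileData/MemProf.h"
#include "llvm/Support/Error.h"

// Sketch: read the schema, propagating any format error to the caller.
llvm::Error readSchemaSketch(const unsigned char *&Buffer,
                             llvm::memprof::MemProfSchema &Out) {
  llvm::Expected<llvm::memprof::MemProfSchema> SchemaOr =
      llvm::memprof::readMemProfSchema(Buffer);
  if (!SchemaOr)
    return SchemaOr.takeError(); // e.g. "memprof schema invalid"
  Out = *SchemaOr; // Buffer has been advanced past the serialized schema.
  return llvm::Error::success();
}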
Referenced declarations (signatures and briefs for the symbols used above):

SmallVector.h
    This file defines the SmallVector class.
bool contains(const_arg_type_t<KeyT> Val) const
    Return true if the specified key is in the map, false otherwise.
Expected
    Tagged union holding either a T or an Error.
static LLVM_ABI GUID getGUIDAssumingExternalLinkage(StringRef GlobalName)
    Return a 64-bit global unique ID constructed from the name of a global symbol.
uint64_t GUID
    Declare a type to represent a global unique identifier for a global value.
void reserve(size_type N)
void push_back(const T &Elt)
SmallVector
    This is a 'vector' (really, a variable-sized array), optimized for the case when
    the array is small.
StringRef
    Represent a constant reference to a string, i.e. a character array and a length,
    which need not be null terminated.
function_ref
    An efficient, type-erasing, non-owning reference to a callable.
raw_ostream
    This class implements an extremely fast bulk output stream that can only output
    to a stream.
static StringRef getCanonicalFnName(const Function &F)
    Return the canonical name for a function, taking into account suffix elision
    policy attributes.
#define llvm_unreachable(msg)
    Marks that the current location is not supposed to be reachable.
static IndexedMemProfRecord deserializeV4(const MemProfSchema &Schema, const unsigned char *Ptr)
uint32_t LinearCallStackId
static IndexedMemProfRecord deserializeV3(const MemProfSchema &Schema, const unsigned char *Ptr)
static void serializeV3(const IndexedMemProfRecord &Record, const MemProfSchema &Schema, raw_ostream &OS, llvm::DenseMap<CallStackId, LinearCallStackId> &MemProfCallStackIndexes)
LLVM_ABI MemProfSchema getHotColdSchema()
static size_t serializedSizeV2(const IndexedAllocationInfo &IAI, const MemProfSchema &Schema)
llvm::SmallVector<Meta, static_cast<int>(Meta::Size)> MemProfSchema
static size_t serializedSizeV3(const IndexedAllocationInfo &IAI, const MemProfSchema &Schema)
LLVM_ABI MemProfSchema getFullSchema()
LLVM_ABI GlobalValue::GUID getGUID(const StringRef FunctionName)
static void serializeV4(const IndexedMemProfRecord &Record, const MemProfSchema &Schema, raw_ostream &OS, llvm::DenseMap<CallStackId, LinearCallStackId> &MemProfCallStackIndexes)
LLVM_ABI Expected<MemProfSchema> readMemProfSchema(const unsigned char *&Buffer)
static void serializeV2(const IndexedMemProfRecord &Record, const MemProfSchema &Schema, raw_ostream &OS)
static IndexedMemProfRecord deserializeV2(const MemProfSchema &Schema, const unsigned char *Ptr)
static size_t serializedSizeV4(const IndexedMemProfRecord &Record, const MemProfSchema &Schema)
value_type readNext(const CharT *&memory, endianness endian)
    Read a value of a particular endianness from a buffer, and increment the buffer
    past that value.
Error make_error(ArgTs &&... Args)
    Make an Error instance representing failure using the given error info type.
std::vector<Frame> CallStack
PortableMemInfoBlock Info
LLVM_ABI size_t serializedSize(const MemProfSchema &Schema, IndexedVersion Version) const
llvm::SmallVector<IndexedAllocationInfo> AllocSites
LLVM_ABI size_t serializedSize(const MemProfSchema &Schema, IndexedVersion Version) const
LLVM_ABI void serialize(const MemProfSchema &Schema, raw_ostream &OS, IndexedVersion Version, llvm::DenseMap<CallStackId, LinearCallStackId> *MemProfCallStackIndexes = nullptr) const
static LLVM_ABI IndexedMemProfRecord deserialize(const MemProfSchema &Schema, const unsigned char *Buffer, IndexedVersion Version)
llvm::SmallVector<IndexedCallSiteInfo> CallSites
LLVM_ABI MemProfRecord toMemProfRecord(llvm::function_ref<std::vector<Frame>(const CallStackId)> Callback) const
static size_t serializedSize(const MemProfSchema &Schema)
endian::Writer
    Adapter to write values to a stream in a particular byte order.
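The readNext and endian::Writer entries above are the primitives behind the Version4 callee-GUID handling shown earlier: the writer length-prefixes the list and the reader advances the buffer pointer as it consumes it. A sketch of that round trip, detached from the MemProf types; writeGuids and readGuids are illustrative names:

#include <cstdint>
#include <vector>
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Support/EndianStream.h"
#include "llvm/Support/raw_ostream.h"

// Sketch: emit a length-prefixed list of 64-bit GUIDs in little-endian order.
void writeGuids(llvm::raw_ostream &OS, llvm::ArrayRef<uint64_t> Guids) {
  llvm::support::endian::Writer LE(OS, llvm::endianness::little);
  LE.write<uint64_t>(Guids.size());
  for (uint64_t Guid : Guids)
    LE.write<uint64_t>(Guid);
}

// Sketch: read the list back; readNext advances Ptr past each value it reads.
std::vector<uint64_t> readGuids(const unsigned char *&Ptr) {
  using namespace llvm::support;
  uint64_t N = endian::readNext<uint64_t>(Ptr, llvm::endianness::little);
  std::vector<uint64_t> Guids;
  Guids.reserve(N);
  for (uint64_t I = 0; I < N; ++I)
    Guids.push_back(endian::readNext<uint64_t>(Ptr, llvm::endianness::little));
  return Guids;
}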