static std::pair<offset_type, offset_type>
EmitKeyDataLength(raw_ostream &Out, key_type_ref K, data_type_ref V) {
  // ...
  offset_type N = K.size();
  offset_type M = 0;
  for (const auto &ProfileData : *V) {
    // ... (fixed per-record fields and counter sizes accumulate into M)
    M += ValueProfData::getSize(ProfileData.second);
  }
  // ...
  return std::make_pair(N, M);
}

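// The (N, M) pair returned above is the contract with
// OnDiskChainedHashTableGenerator: EmitData below must emit exactly M
// payload bytes per key, or readers of the emitted table will mis-seek.
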
void EmitData(raw_ostream &Out, key_type_ref, data_type_ref V, offset_type) {
  using namespace support;
  endian::Writer LE(Out, ValueProfDataEndianness);
  for (const auto &ProfileData : *V) {
    // ... (per-record summary bookkeeping elided)
    LE.write<uint64_t>(ProfileData.first); // Function hash.
    // ... (counter and bitmap payloads elided)
    std::unique_ptr<ValueProfData> VDataPtr =
        ValueProfData::serializeFrom(ProfileData.second);
    uint32_t S = VDataPtr->getSize();
    // ...
    Out.write((const char *)VDataPtr.get(), S);
  }
}

InstrProfWriter::InstrProfWriter(
    bool Sparse, uint64_t TemporalProfTraceReservoirSize,
    uint64_t MaxTemporalProfTraceLength, bool WritePrevVersion,
    memprof::IndexedVersion MemProfVersionRequested, bool MemProfFullSchema,
    bool MemprofGenerateRandomHotness, unsigned MemprofGenerateRandomHotnessSeed)
    : Sparse(Sparse), MaxTemporalProfTraceLength(MaxTemporalProfTraceLength),
      TemporalProfTraceReservoirSize(TemporalProfTraceReservoirSize),
      // ...
      WritePrevVersion(WritePrevVersion),
      MemProfVersionRequested(MemProfVersionRequested),
      MemProfFullSchema(MemProfFullSchema),
      MemprofGenerateRandomHotness(MemprofGenerateRandomHotness) {
  // ...
  if (MemprofGenerateRandomHotness) {
    unsigned seed = MemprofGenerateRandomHotnessSeed
                        ? MemprofGenerateRandomHotnessSeed
                        : std::time(nullptr);
    errs() << "random hotness seed = " << seed << "\n";
    std::srand(seed);
  }
}

void InstrProfWriter::setValueProfDataEndianness(llvm::endianness Endianness) {
  InfoObj->ValueProfDataEndianness = Endianness;
}

void InstrProfWriter::addRecord(NamedInstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto Name = I.Name;
  auto Hash = I.Hash;
  addRecord(Name, Hash, std::move(I), Weight, Warn);
}

void InstrProfWriter::overlapRecord(NamedInstrProfRecord &&Other,
                                    OverlapStats &Overlap,
                                    OverlapStats &FuncLevelOverlap,
                                    const OverlapFuncFilters &FuncFilter) {
  auto Name = Other.Name;
  auto Hash = Other.Hash;
  Other.accumulateCounts(FuncLevelOverlap.Test);
  auto It = FunctionData.find(Name);
  if (It == FunctionData.end()) {
    // The function only exists in the test profile.
    Overlap.addOneUnique(FuncLevelOverlap.Test);
    return;
  }
  // ...
  auto &ProfileDataMap = It->second;
  auto [Where, NewFunc] = ProfileDataMap.try_emplace(Hash);
  if (NewFunc) {
    // Same name but a different structural hash counts as a mismatch.
    Overlap.addOneMismatch(FuncLevelOverlap.Test);
    return;
  }
  // ...
  InstrProfRecord &Dest = Where->second;
  uint64_t ValueCutoff = FuncFilter.ValueCutoff;
  if (!FuncFilter.NameFilter.empty() && Name.contains(FuncFilter.NameFilter))
    ValueCutoff = 0;
  Dest.overlap(Other, Overlap, FuncLevelOverlap, ValueCutoff);
}

void InstrProfWriter::addRecord(StringRef Name, uint64_t Hash,
                                InstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto &ProfileDataMap = FunctionData[Name];
  auto [Where, NewFunc] = ProfileDataMap.try_emplace(Hash);
  InstrProfRecord &Dest = Where->second;

  auto MapWarn = [&](instrprof_error E) {
    Warn(make_error<InstrProfError>(E));
  };

  if (NewFunc) {
    // We've never seen a function with this name and hash; add it.
    Dest = std::move(I);
    if (Weight > 1)
      Dest.scale(Weight, 1, MapWarn);
  } else {
    // We're updating a function we've seen before.
    Dest.merge(I, Weight, MapWarn);
  }
}

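// Note on the weighted add above: scaling the first occurrence by Weight and
// merging later ones with Weight gives the same counts as merging Weight
// copies of the record, with counter saturation routed through MapWarn.
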
void InstrProfWriter::addMemProfRecord(
    const GlobalValue::GUID Id, const memprof::IndexedMemProfRecord &Record) {
  auto NewRecord = Record;
  // If requested, replace each allocation site's hotness with a coin flip.
  if (MemprofGenerateRandomHotness) {
    for (auto &Alloc : NewRecord.AllocSites) {
      // Hot default: maximal access density, zero lifetime. (The defaults
      // here are assumed; the exact lines are elided in this excerpt.)
      uint64_t NewTLAD = std::numeric_limits<uint64_t>::max();
      uint64_t NewTL = 0;
      bool IsCold = std::rand() % 2;
      if (IsCold) {
        NewTLAD = 0; // assumed cold counterpart
        NewTL = std::numeric_limits<uint64_t>::max();
      }
      Alloc.Info.setTotalLifetimeAccessDensity(NewTLAD);
      Alloc.Info.setTotalLifetime(NewTL);
    }
  }
  MemProfSumBuilder.addRecord(NewRecord);
  auto [Iter, Inserted] = MemProfData.Records.insert({Id, NewRecord});
  // If the GUID was already present, merge instead.
  if (Inserted)
    return;
  memprof::IndexedMemProfRecord &Existing = Iter->second;
  Existing.merge(NewRecord);
}

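// Frames and call stacks are interned by ID. Merging profiles is only sound
// when every input agrees on the ID -> content mapping, which the two
// helpers below enforce by warning and bailing out on any mismatch.
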
bool InstrProfWriter::addMemProfFrame(const memprof::FrameId Id,
                                      const memprof::Frame &Frame,
                                      function_ref<void(Error)> Warn) {
  auto [Iter, Inserted] = MemProfData.Frames.insert({Id, Frame});
  if (!Inserted && Iter->second != Frame) {
    Warn(make_error<InstrProfError>(instrprof_error::malformed,
                                    "frame to id mapping mismatch"));
    return false;
  }
  return true;
}

bool InstrProfWriter::addMemProfCallStack(
    const memprof::CallStackId CSId,
    const llvm::SmallVector<memprof::FrameId> &CallStack,
    function_ref<void(Error)> Warn) {
  auto [Iter, Inserted] = MemProfData.CallStacks.insert({CSId, CallStack});
  if (!Inserted && Iter->second != CallStack) {
    Warn(make_error<InstrProfError>(instrprof_error::malformed,
                                    "call stack to id mapping mismatch"));
    return false;
  }
  return true;
}

bool InstrProfWriter::addMemProfData(memprof::IndexedMemProfData Incoming,
                                     function_ref<void(Error)> Warn) {
  // ...
  if (MemProfData.Frames.empty())
    MemProfData.Frames = std::move(Incoming.Frames);
  else
    for (const auto &[Id, F] : Incoming.Frames)
      if (!addMemProfFrame(Id, F, Warn))
        return false;

  if (MemProfData.CallStacks.empty())
    MemProfData.CallStacks = std::move(Incoming.CallStacks);
  else
    for (const auto &[CSId, CS] : Incoming.CallStacks)
      if (!addMemProfCallStack(CSId, CS, Warn))
        return false;

  // Add one record at a time if randomization is requested.
  if (MemProfData.Records.empty() && !MemprofGenerateRandomHotness) {
    for (const auto &[GUID, Record] : Incoming.Records)
      MemProfSumBuilder.addRecord(Record);
    MemProfData.Records = std::move(Incoming.Records);
  } else {
    for (const auto &[GUID, Record] : Incoming.Records)
      addMemProfRecord(GUID, Record);
  }
  return true;
}

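// Note: the std::move fast paths above are taken only when this writer holds
// no MemProf data of its own; random hotness forces the per-record path so
// every record passes through addMemProfRecord's randomization.
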
void InstrProfWriter::addDataAccessProfData(
    std::unique_ptr<memprof::DataAccessProfData> DataAccessProfDataIn) {
  DataAccessProfileData = std::move(DataAccessProfDataIn);
}

void InstrProfWriter::addTemporalProfileTraces(
    SmallVectorImpl<TemporalProfTraceTy> &SrcTraces, uint64_t SrcStreamSize) {
  if (TemporalProfTraces.size() > TemporalProfTraceReservoirSize)
    TemporalProfTraces.truncate(TemporalProfTraceReservoirSize);
  for (auto &Trace : SrcTraces)
    if (Trace.FunctionNameRefs.size() > MaxTemporalProfTraceLength)
      Trace.FunctionNameRefs.resize(MaxTemporalProfTraceLength);
  llvm::erase_if(SrcTraces, [](auto &T) { return T.FunctionNameRefs.empty(); });
  // ...
  if (SrcTraces.empty())
    return;
  // Fill any empty slots in the reservoir first.
  auto SrcTraceIt = SrcTraces.begin();
  while (TemporalProfTraces.size() < TemporalProfTraceReservoirSize &&
         SrcTraceIt < SrcTraces.end())
    TemporalProfTraces.push_back(*SrcTraceIt++);
  // Then sample the remainder of the source stream.
  for (uint64_t I = TemporalProfTraces.size();
       I < SrcStreamSize && SrcTraceIt < SrcTraces.end(); I++) {
    std::uniform_int_distribution<uint64_t> Distribution(0, I);
    uint64_t RandomIndex = Distribution(RNG);
    if (RandomIndex < TemporalProfTraces.size())
      TemporalProfTraces[RandomIndex] = *SrcTraceIt++;
  }
  TemporalProfTraceStreamSize += SrcStreamSize;
}

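// The loop above is classic reservoir sampling: the I-th element of the
// stream (0-based) replaces a random reservoir slot with probability
// size / (I + 1), so every trace in the combined stream is retained with
// equal probability regardless of arrival order.
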
void InstrProfWriter::mergeRecordsFromWriter(InstrProfWriter &&IPW,
                                             function_ref<void(Error)> Warn) {
  for (auto &I : IPW.FunctionData)
    for (auto &Func : I.getValue())
      addRecord(I.getKey(), Func.first, std::move(Func.second), 1, Warn);

  BinaryIds.reserve(BinaryIds.size() + IPW.BinaryIds.size());
  for (auto &I : IPW.BinaryIds)
    addBinaryIds(I);

  addTemporalProfileTraces(IPW.TemporalProfTraces,
                           IPW.TemporalProfTraceStreamSize);

  MemProfData.Frames.reserve(IPW.MemProfData.Frames.size());
  for (auto &[FrameId, Frame] : IPW.MemProfData.Frames) {
    // If a frame mapping conflicts, it makes no sense to merge the rest of
    // this profile's MemProf records.
    if (!addMemProfFrame(FrameId, Frame, Warn))
      return;
  }

  MemProfData.CallStacks.reserve(IPW.MemProfData.CallStacks.size());
  for (auto &[CSId, CallStack] : IPW.MemProfData.CallStacks) {
    if (!addMemProfCallStack(CSId, CallStack, Warn))
      return;
  }

  MemProfData.Records.reserve(IPW.MemProfData.Records.size());
  for (auto &[GUID, Record] : IPW.MemProfData.Records) {
    addMemProfRecord(GUID, Record);
  }
}

bool InstrProfWriter::shouldEncodeData(const ProfilingData &PD) {
  if (!Sparse)
    return true;
  for (const auto &Func : PD) {
    // ... (returns true if any counter or bitmap byte is nonzero)
  }
  return false;
}

static void setSummary(IndexedInstrProf::Summary *TheSummary,
                       ProfileSummary &PS) {
  // ...
  const auto &Res = PS.getDetailedSummary();
  for (unsigned I = 0; I < Res.size(); I++)
    TheSummary->setEntry(I, Res[I]);
}

static uint64_t writeHeader(const IndexedInstrProf::Header &Header,
                            const bool WritePrevVersion, ProfOStream &OS) {
  // Only the first four fields are written eagerly.
  for (int I = 0; I < 4; I++)
    OS.write(reinterpret_cast<const uint64_t *>(&Header)[I]);

  // Remember where the section offsets will live so they can be back patched
  // once the sections have been written.
  auto BackPatchStartOffset = OS.tell();

  // Reserve zeroed slots for the section offsets.
  OS.write(0); // HashOffset
  OS.write(0); // MemProfOffset
  OS.write(0); // BinaryIdOffset
  OS.write(0); // TemporalProfTracesOffset
  if (!WritePrevVersion)
    OS.write(0); // VTableNamesOffset
  return BackPatchStartOffset;
}

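// The zeroed slots reserved above are rewritten by writeImpl (via
// ProfOStream::patch) once each section's start offset is known; only the
// first four header fields are final at this point.
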
Error InstrProfWriter::writeBinaryIds(ProfOStream &OS) {
  // ...
  uint64_t BinaryIdsSectionSize = 0;

  // Remove duplicate binary ids.
  llvm::sort(BinaryIds);
  BinaryIds.erase(llvm::unique(BinaryIds), BinaryIds.end());

  for (const auto &BI : BinaryIds) {
    // Increment by binary id length data type size.
    BinaryIdsSectionSize += sizeof(uint64_t);
    // Increment by binary id data length, aligned to 8 bytes.
    BinaryIdsSectionSize += alignToPowerOf2(BI.size(), sizeof(uint64_t));
  }
  // Write binary ids section size.
  OS.write(BinaryIdsSectionSize);

  for (const auto &BI : BinaryIds) {
    uint64_t BILen = BI.size();
    // Write binary id length.
    OS.write(BILen);
    // Write binary id data.
    for (unsigned K = 0; K < BILen; K++)
      OS.writeByte(BI[K]);
    // Write padding if necessary.
    uint64_t PaddingSize = alignToPowerOf2(BILen, sizeof(uint64_t)) - BILen;
    for (unsigned K = 0; K < PaddingSize; K++)
      OS.writeByte(0);
  }
  return Error::success();
}

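// Each binary id is padded to an 8-byte boundary:
// alignToPowerOf2(BILen, sizeof(uint64_t)) - BILen yields 0..7 zero bytes,
// so every following length field stays naturally aligned for the reader.
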
Error InstrProfWriter::writeVTableNames(ProfOStream &OS) {
  std::vector<std::string> VTableNameStrs;
  for (StringRef VTableName : VTableNames.keys())
    VTableNameStrs.push_back(VTableName.str());

  std::string CompressedVTableNames;
  if (!VTableNameStrs.empty())
    if (Error E = collectGlobalObjectNameStrings(
            VTableNameStrs, compression::zlib::isAvailable(),
            CompressedVTableNames))
      return E;

  const uint64_t CompressedStringLen = CompressedVTableNames.length();

  // Record the length of the compressed string.
  OS.write(CompressedStringLen);

  // Write the chars in the compressed string.
  for (auto &c : CompressedVTableNames)
    OS.writeByte(static_cast<uint8_t>(c));

  // Pad up to a multiple of 8.
  const uint64_t PaddedLength = alignTo(CompressedStringLen, 8);
  for (uint64_t K = CompressedStringLen; K < PaddedLength; K++)
    OS.writeByte(0);

  return Error::success();
}

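// The reader consumes exactly CompressedStringLen bytes; the zero padding up
// to alignTo(CompressedStringLen, 8) only realigns the stream for whatever
// section follows.
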
Error InstrProfWriter::writeImpl(ProfOStream &OS) {
  using namespace IndexedInstrProf;
  using namespace support;

  OnDiskChainedHashTableGenerator<InstrProfRecordWriterTrait> Generator;

  InstrProfSummaryBuilder ISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->SummaryBuilder = &ISB;
  InstrProfSummaryBuilder CSISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->CSSummaryBuilder = &CSISB;

  // Populate the hash table generator.
  SmallVector<std::pair<StringRef, const ProfilingData *>> OrderedData;
  for (const auto &I : FunctionData)
    if (shouldEncodeData(I.getValue()))
      OrderedData.emplace_back(I.getKey(), &I.getValue());
  llvm::sort(OrderedData, less_first());
  for (const auto &I : OrderedData)
    Generator.insert(I.first, I.second);
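  // Sorting by key before insertion makes the emitted hash table layout
  // deterministic for a given input profile, since StringMap iteration
  // order is not.
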
  // Write the header.
  IndexedInstrProf::Header Header;
  Header.Version = WritePrevVersion
                       ? IndexedInstrProf::ProfVersion::Version11
                       : IndexedInstrProf::ProfVersion::CurrentVersion;
  // Each profile kind ORs its VARIANT_MASK_* bit into Header.Version, e.g.:
  if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
    Header.Version |= VARIANT_MASK_IR_PROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
    Header.Version |= VARIANT_MASK_CSIR_PROF;
  // ... (remaining kind checks elided)

  const uint64_t BackPatchStartOffset =
      writeHeader(Header, WritePrevVersion, OS);

  // Reserve space for the profile summary; real values are back patched
  // after the summary is computed.
  uint32_t NumEntries = ProfileSummaryBuilder::DefaultCutoffs.size();
  uint32_t SummarySize = Summary::getSize(Summary::NumKinds, NumEntries);
  // Remember the summary offset.
  uint64_t SummaryOffset = OS.tell();
  for (unsigned I = 0; I < SummarySize / sizeof(uint64_t); I++)
    OS.write(0);
  uint64_t CSSummaryOffset = 0;
  uint64_t CSSummarySize = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive)) {
    CSSummaryOffset = OS.tell();
    CSSummarySize = SummarySize / sizeof(uint64_t);
    for (unsigned I = 0; I < CSSummarySize; I++)
      OS.write(0);
  }

  // Write the hash table.
  uint64_t HashTableStart = Generator.Emit(OS.OS, *InfoObj);
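  // Generator.Emit writes out the table and returns its start offset, which
  // is back patched into the header's hash-table slot below.
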
  // Write the MemProf profile data if we have it.
  uint64_t MemProfSectionStart = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::MemProf)) {
    MemProfSectionStart = OS.tell();
    if (Error E = writeMemProf(
            OS, MemProfData, MemProfVersionRequested, MemProfFullSchema,
            std::move(DataAccessProfileData), MemProfSumBuilder.getSummary()))
      return E;
  }

  uint64_t BinaryIdSectionStart = OS.tell();
  if (auto E = writeBinaryIds(OS))
    return E;

  uint64_t VTableNamesSectionStart = OS.tell();

  if (!WritePrevVersion)
    if (Error E = writeVTableNames(OS))
      return E;

  uint64_t TemporalProfTracesSectionStart = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile)) {
    TemporalProfTracesSectionStart = OS.tell();
    OS.write(TemporalProfTraces.size());
    OS.write(TemporalProfTraceStreamSize);
    for (auto &Trace : TemporalProfTraces) {
      OS.write(Trace.Weight);
      OS.write(Trace.FunctionNameRefs.size());
      for (auto &NameRef : Trace.FunctionNameRefs)
        OS.write(NameRef);
    }
  }
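  // Trace stream layout: trace count and total stream size first, then per
  // trace its weight, length, and MD5 name refs. Recording the stream size
  // lets a later merge resume reservoir sampling with correct probabilities.
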
  // Allocate space for data to be serialized out.
  std::unique_ptr<IndexedInstrProf::Summary> TheSummary =
      IndexedInstrProf::allocSummary(SummarySize);
  // Compute the summary and copy it into the serialized form.
  std::unique_ptr<ProfileSummary> PS = ISB.getSummary();
  setSummary(TheSummary.get(), *PS);
  InfoObj->SummaryBuilder = nullptr;

  // Context-sensitive summary, if present.
  std::unique_ptr<IndexedInstrProf::Summary> TheCSSummary = nullptr;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive)) {
    TheCSSummary = IndexedInstrProf::allocSummary(SummarySize);
    std::unique_ptr<ProfileSummary> CSPS = CSISB.getSummary();
    setSummary(TheCSSummary.get(), *CSPS);
  }
  InfoObj->CSSummaryBuilder = nullptr;

  SmallVector<uint64_t, 8> HeaderOffsets = {HashTableStart, MemProfSectionStart,
                                            BinaryIdSectionStart,
                                            TemporalProfTracesSectionStart};
  if (!WritePrevVersion)
    HeaderOffsets.push_back(VTableNamesSectionStart);

  PatchItem PatchItems[] = {
      // Patch the header's section-offset slots.
      {BackPatchStartOffset, HeaderOffsets},
      // Patch the summary data.
      {SummaryOffset,
       ArrayRef<uint64_t>(reinterpret_cast<uint64_t *>(TheSummary.get()),
                          SummarySize / sizeof(uint64_t))},
      {CSSummaryOffset,
       ArrayRef<uint64_t>(reinterpret_cast<uint64_t *>(TheCSSummary.get()),
                          CSSummarySize)}};
  OS.patch(PatchItems);
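  // All deferred fixups happen in one pass: the header offset slots reserved
  // by writeHeader and the zero-filled summary areas are overwritten now
  // that their final contents are known.
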
  for (const auto &I : FunctionData)
    for (const auto &F : I.getValue())
      if (Error E = validateRecord(F.second))
        return E;

  return Error::success();
}

Error InstrProfWriter::write(raw_fd_ostream &OS) {
  ProfOStream POS(OS);
  return writeImpl(POS);
}

Error InstrProfWriter::write(raw_string_ostream &OS) {
  ProfOStream POS(OS);
  return writeImpl(POS);
}

static const char *ValueProfKindStr[] = {
#define VALUE_PROF_KIND(Enumerator, Value, Descr) #Enumerator,
#include "llvm/ProfileData/InstrProfData.inc"
};

Error InstrProfWriter::validateRecord(const InstrProfRecord &Func) {
  for (uint32_t VK = 0; VK <= IPVK_Last; VK++) {
    if (VK == IPVK_IndirectCallTarget || VK == IPVK_VTableTarget)
      continue;
    uint32_t NS = Func.getNumValueSites(VK);
    for (uint32_t S = 0; S < NS; S++) {
      DenseSet<uint64_t> SeenValues;
      for (const auto &V : Func.getValueArrayForSite(VK, S))
        if (!SeenValues.insert(V.Value).second)
          return make_error<InstrProfError>(instrprof_error::invalid_prof);
    }
  }
  return Error::success();
}

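// A value site is malformed when the same target value appears twice at one
// site, hence the per-site DenseSet above.
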
void InstrProfWriter::writeRecordInText(StringRef Name, uint64_t Hash,
                                        const InstrProfRecord &Func,
                                        InstrProfSymtab &Symtab,
                                        raw_fd_ostream &OS) {
  OS << Name << "\n";
  OS << "# Func Hash:\n" << Hash << "\n";
  OS << "# Num Counters:\n" << Func.Counts.size() << "\n";
  OS << "# Counter Values:\n";
  for (uint64_t Count : Func.Counts)
    OS << Count << "\n";

  if (Func.BitmapBytes.size() > 0) {
    OS << "# Num Bitmap Bytes:\n$" << Func.BitmapBytes.size() << "\n";
    OS << "# Bitmap Byte Values:\n";
    for (uint8_t Byte : Func.BitmapBytes) {
      OS << "0x";
      OS.write_hex(Byte);
      OS << "\n";
    }
    OS << "\n";
  }

  uint32_t NumValueKinds = Func.getNumValueKinds();
  if (!NumValueKinds) {
    OS << "\n";
    return;
  }

  OS << "# Num Value Kinds:\n" << Func.getNumValueKinds() << "\n";
  for (uint32_t VK = 0; VK < IPVK_Last + 1; VK++) {
    uint32_t NS = Func.getNumValueSites(VK);
    if (!NS)
      continue;
    OS << "# ValueKind = " << ValueProfKindStr[VK] << ":\n" << VK << "\n";
    OS << "# NumValueSites:\n" << NS << "\n";
    for (uint32_t S = 0; S < NS; S++) {
      auto VD = Func.getValueArrayForSite(VK, S);
      OS << VD.size() << "\n";
      for (const auto &V : VD) {
        if (VK == IPVK_IndirectCallTarget || VK == IPVK_VTableTarget)
          OS << Symtab.getFuncOrVarNameIfDefined(V.Value) << ":" << V.Count
             << "\n";
        else
          OS << V.Value << ":" << V.Count << "\n";
      }
    }
  }

  OS << "\n";
}

Error InstrProfWriter::writeText(raw_fd_ostream &OS) {
  // Check CS first since it implies an IR-level profile.
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
    OS << "# CSIR level Instrumentation Flag\n:csir\n";
  else if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
    OS << "# IR level Instrumentation Flag\n:ir\n";

  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::FunctionEntryInstrumentation))
    OS << "# Always instrument the function entry block\n:entry_first\n";
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::LoopEntriesInstrumentation))
    OS << "# Always instrument the loop entry "
          "blocks\n:instrument_loop_entries\n";
  if (static_cast<bool>(ProfileKind & InstrProfKind::SingleByteCoverage))
    OS << "# Instrument block coverage\n:single_byte_coverage\n";

  InstrProfSymtab Symtab;

  using FuncPair = detail::DenseMapPair<uint64_t, InstrProfRecord>;
  using RecordType = std::pair<StringRef, FuncPair>;
  SmallVector<RecordType, 4> OrderedFuncData;

  for (const auto &I : FunctionData) {
    if (shouldEncodeData(I.getValue())) {
      if (Error E = Symtab.addFuncName(I.getKey()))
        return E;
      for (const auto &Func : I.getValue())
        OrderedFuncData.push_back(std::make_pair(I.getKey(), Func));
    }
  }

  for (const auto &VTableName : VTableNames)
    if (Error E = Symtab.addVTableName(VTableName.getKey()))
      return E;

  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile))
    writeTextTemporalProfTraceData(OS, Symtab);

  llvm::sort(OrderedFuncData, [](const RecordType &A, const RecordType &B) {
    return std::tie(A.first, A.second.first) <
           std::tie(B.first, B.second.first);
  });

  for (const auto &record : OrderedFuncData) {
    const StringRef &Name = record.first;
    const FuncPair &Func = record.second;
    writeRecordInText(Name, Func.first, Func.second, Symtab, OS);
  }

  for (const auto &record : OrderedFuncData) {
    const FuncPair &Func = record.second;
    if (Error E = validateRecord(Func.second))
      return E;
  }

  return Error::success();
}

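// OrderedFuncData is sorted by (name, hash) above so the text output is
// deterministic; the temporal trace block is emitted before any records
// because the reader expects it in the text header.
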
void InstrProfWriter::writeTextTemporalProfTraceData(raw_fd_ostream &OS,
                                                     InstrProfSymtab &Symtab) {
  OS << ":temporal_prof_traces\n";
  OS << "# Num Temporal Profile Traces:\n" << TemporalProfTraces.size() << "\n";
  OS << "# Temporal Profile Trace Stream Size:\n"
     << TemporalProfTraceStreamSize << "\n";
  for (auto &Trace : TemporalProfTraces) {
    OS << "# Weight:\n" << Trace.Weight << "\n";
    for (auto &NameRef : Trace.FunctionNameRefs)
      OS << Symtab.getFuncOrVarName(NameRef) << ",";
    OS << "\n";
  }
  OS << "\n";
}