template <typename FrameIdTy>
LinearCallStackId CallStackRadixTreeBuilder<FrameIdTy>::encodeCallStack(
    const llvm::SmallVector<FrameIdTy> *CallStack,
    const llvm::SmallVector<FrameIdTy> *Prev,
    const llvm::DenseMap<FrameIdTy, LinearFrameId> *MemProfFrameIndexes) {
  // Compute the length of the common root prefix with the previous call stack.
  uint32_t CommonLen = 0;
  if (Prev) {
    auto Pos = std::mismatch(Prev->rbegin(), Prev->rend(), CallStack->rbegin(),
                             CallStack->rend());
    CommonLen = std::distance(CallStack->rbegin(), Pos.second);
  }
  // Drop the portion of Indexes beyond the common prefix.
  assert(CommonLen <= Indexes.size());
  Indexes.resize(CommonLen);
  // If we share a prefix, append a pointer to the parent.  The offset must be
  // negative because the parent was added to RadixArray earlier.
  if (CommonLen) {
    uint32_t CurrentIndex = RadixArray.size();
    uint32_t ParentIndex = Indexes.back();
    assert(ParentIndex < CurrentIndex);
    RadixArray.push_back(ParentIndex - CurrentIndex);
  }
  // Copy the frames beyond the common prefix, root side first, translating
  // frame IDs to linear IDs when a translation table is provided.  Remember
  // where each frame lands so that longer call stacks can point back to it.
  for (FrameIdTy F : llvm::drop_begin(llvm::reverse(*CallStack), CommonLen)) {
    Indexes.push_back(RadixArray.size());
    RadixArray.push_back(
        MemProfFrameIndexes ? MemProfFrameIndexes->find(F)->second : F);
  }
  // Terminate the encoding with the length of the call stack, and return the
  // index within RadixArray where decoding of this call stack starts.
  RadixArray.push_back(CallStack->size());
  return RadixArray.size() - 1;
}
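// A minimal sketch (not from the original source) of the common-prefix
// computation above, assuming the usual <algorithm>, <cassert>, and
// <iterator> includes.  Call stacks are stored leaf-to-root, so reverse
// iteration walks them from the root; the function name and the concrete
// frame IDs are invented for illustration.
static void commonPrefixExample() {
  llvm::SmallVector<uint32_t> Prev = {7, 6, 4}; // leaf f7 -> f6 -> root f4
  llvm::SmallVector<uint32_t> Cur = {9, 6, 4};  // leaf f9 -> f6 -> root f4
  auto Pos =
      std::mismatch(Prev.rbegin(), Prev.rend(), Cur.rbegin(), Cur.rend());
  // Both stacks share the root-side prefix f4 -> f6, so CommonLen == 2 and
  // only f9 plus a parent pointer need to be appended for Cur.
  uint32_t CommonLen = std::distance(Cur.rbegin(), Pos.second);
  assert(CommonLen == 2);
  (void)CommonLen;
}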
template <typename FrameIdTy>
void CallStackRadixTreeBuilder<FrameIdTy>::build(
    llvm::MapVector<CallStackId, llvm::SmallVector<FrameIdTy>>
        &&MemProfCallStackData,
    const llvm::DenseMap<FrameIdTy, LinearFrameId> *MemProfFrameIndexes,
    llvm::DenseMap<FrameIdTy, FrameStat> &FrameHistogram) {
  // Take the vector portion of MemProfCallStackData; it is exactly what we
  // need to sort, and we no longer need the lookup capability.
  llvm::SmallVector<CSIdPair, 0> CallStacks = MemProfCallStackData.takeVector();

  // Return early if we have no work to do.
  if (CallStacks.empty()) {
    RadixArray.clear();
    CallStackPos.clear();
    return;
  }

  // Sorting in dictionary order from the root maximizes the common prefix
  // between adjacent call stacks; ordering frames by popularity clusters
  // popular prefixes so they are encoded once and shared often.
  llvm::sort(CallStacks, [&](const CSIdPair &L, const CSIdPair &R) {
    // Call stacks are stored leaf-to-root, so compare from the root side.
    return std::lexicographical_compare(
        L.second.rbegin(), L.second.rend(), R.second.rbegin(), R.second.rend(),
        [&](FrameIdTy F1, FrameIdTy F2) {
          uint64_t H1 = FrameHistogram[F1].Count;
          uint64_t H2 = FrameHistogram[F2].Count;
          // Popular frames come later; break ties by frame ID so that the
          // order is deterministic.
          if (H1 != H2)
            return H1 < H2;
          return F1 < F2;
        });
  });

  // Reserve reasonable amounts of storage up front.  Indexes grows at most as
  // long as the longest call stack; CallStackPos gets one entry per stack.
  RadixArray.clear();
  RadixArray.reserve(CallStacks.size() * 8);
  Indexes.clear();
  Indexes.reserve(512);
  CallStackPos.clear();
  CallStackPos.reserve(CallStacks.size());

  // Encode one call stack at a time, reusing the prefix shared with the
  // previously encoded call stack, and remember where each encoding starts.
  const llvm::SmallVector<FrameIdTy> *Prev = nullptr;
  for (const auto &[CSId, CallStack] : CallStacks) {
    LinearCallStackId Pos =
        encodeCallStack(&CallStack, Prev, MemProfFrameIndexes);
    CallStackPos.insert({CSId, Pos});
    Prev = &CallStack;
  }

  // Every encoded call stack contributes at least its length field.
  assert(!RadixArray.empty());

  // Reverse the radix array in place so that decoding scans left to right.
  for (size_t I = 0, J = RadixArray.size() - 1; I < J; ++I, --J)
    std::swap(RadixArray[I], RadixArray[J]);

  // The recorded positions indexed the pre-reversal array; map each one to
  // its post-reversal position.
  for (auto &[K, V] : CallStackPos)
    V = RadixArray.size() - 1 - V;
}
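// A minimal sketch (not from the original source) of why the fix-up loop
// above is Size - 1 - V: reversing an array moves the element at index I to
// index Size - 1 - I.  The array contents and names here are invented.
static void reversalFixupExample() {
  llvm::SmallVector<uint32_t> Array = {10, 20, 30, 40};
  uint32_t Pos = 3; // points at the value 40 before reversal
  for (size_t I = 0, J = Array.size() - 1; I < J; ++I, --J)
    std::swap(Array[I], Array[J]);
  Pos = Array.size() - 1 - Pos; // now 0; still points at the value 40
  assert(Array[Pos] == 40);
  (void)Pos;
}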
template <typename FrameIdTy>
llvm::DenseMap<FrameIdTy, FrameStat> computeFrameHistogram(
    llvm::MapVector<CallStackId, llvm::SmallVector<FrameIdTy>>
        &MemProfCallStackData) {
  llvm::DenseMap<FrameIdTy, FrameStat> Histogram;

  for (const auto &KV : MemProfCallStackData) {
    const auto &CS = KV.second;
    for (unsigned I = 0, E = CS.size(); I != E; ++I) {
      // Count each occurrence of the frame and accumulate its position
      // within the call stack.
      auto &S = Histogram[CS[I]];
      ++S.Count;
      S.PositionSum += I;
    }
  }
  return Histogram;
}
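// Worked illustration (not from the original source): for the two call
// stacks {7, 6, 4} and {9, 6, 4} stored leaf-to-root, the histogram comes
// out as 4 -> 2, 6 -> 2, 7 -> 1, 9 -> 1.  Feeding these counts to the sort
// comparator in build() makes the popular root-side frames f4 and f6 sort
// adjacently, so their prefix is encoded once and shared by both stacks.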
// Explicit instantiations for the two frame ID types in use.
template LLVM_ABI llvm::DenseMap<FrameId, FrameStat>
computeFrameHistogram<FrameId>(
    llvm::MapVector<CallStackId, llvm::SmallVector<FrameId>>
        &MemProfCallStackData);
template LLVM_ABI llvm::DenseMap<LinearFrameId, FrameStat>
computeFrameHistogram<LinearFrameId>(
    llvm::MapVector<CallStackId, llvm::SmallVector<LinearFrameId>>
        &MemProfCallStackData);
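// A usage sketch (not from the original source) showing how the two entry
// points compose, based on the signatures above.  The toy call-stack data is
// invented, and passing a null translation table is an assumption for
// illustration; real callers may supply MemProfFrameIndexes.
static void buildExample() {
  llvm::MapVector<CallStackId, llvm::SmallVector<LinearFrameId>> CallStacks;
  CallStacks[1] = {7, 6, 4}; // leaf-to-root
  CallStacks[2] = {9, 6, 4};
  llvm::DenseMap<LinearFrameId, FrameStat> Histogram =
      computeFrameHistogram<LinearFrameId>(CallStacks);
  CallStackRadixTreeBuilder<LinearFrameId> Builder;
  // Frames are already linear IDs here, so no translation table is needed.
  Builder.build(std::move(CallStacks), /*MemProfFrameIndexes=*/nullptr,
                Histogram);
  // The encoded array and the CallStackId -> position map are now ready for
  // serialization.
}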