// ReleaseModeModelRunner.h - implements a model runner wrapping an
// AOT-compiled ML model. Only inference is supported.

#ifndef LLVM_ANALYSIS_RELEASEMODEMODELRUNNER_H
#define LLVM_ANALYSIS_RELEASEMODEMODELRUNNER_H

#include "llvm/ADT/StringExtras.h"
#include "llvm/Analysis/MLModelRunner.h"
#include "llvm/Analysis/TensorSpec.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MD5.h"

#include <memory>
#include <vector>
/// ReleaseModeModelRunner - production mode implementation of the
/// MLModelRunner, wrapping an AOT-compiled model.
template <class TGen>
class ReleaseModeModelRunner : public MLModelRunner {
public:
  /// FeatureNames' type should be an indexed collection of std::string, like
  /// std::array or std::vector, that has a size() method.
  template <class FType>
  ReleaseModeModelRunner(LLVMContext &Ctx, const FType &InputSpec,
                         StringRef DecisionName,
                         const EmbeddedModelRunnerOptions &Options = {})
      : MLModelRunner(Ctx, MLModelRunner::Kind::Release, InputSpec.size() + 1),
        CompiledModel(std::make_unique<TGen>()) {
    assert(CompiledModel && "The CompiledModel should be valid");
    // Set up the model_selector input past all the InputSpecs.
    bool InputIsPresent = true;
    populateTensor(InputSpec.size(),
                   TensorSpec::createSpec<uint64_t>("model_selector", {2}),
                   Options.FeedPrefix, InputIsPresent);

    if (Options.ModelSelector.empty() && InputIsPresent)
      Ctx.emitError(
          "A model selector was not specified but the underlying model "
          "requires selecting one because it exposes a model_selector input");
    if (!Options.ModelSelector.empty()) {
      if (!InputIsPresent)
        Ctx.emitError(
            "A model selector was specified but the underlying model "
            "does not expose a model_selector input");
      // The MD5 hash of Options.ModelSelector is then computed and written
      // into the model_selector tensor as a {high, low} pair of uint64_t.
    }
    for (size_t I = 0; I < InputSpec.size(); ++I)
      populateTensor(I, InputSpec[I], Options.FeedPrefix, InputIsPresent);

    ResultIndex = CompiledModel->LookupResultIndex(Options.FetchPrefix.str() +
                                                   DecisionName.str());
    assert(ResultIndex >= 0 && "Cannot find DecisionName in inlining model");
  }

  virtual ~ReleaseModeModelRunner() = default;

  static bool classof(const MLModelRunner *R) {
    return R->getKind() == MLModelRunner::Kind::Release;
  }

private:
  // Fetch the model-provided buffer for the given Spec, or let MLModelRunner
  // create a scratch buffer. Report back whether the model had that input.
  void populateTensor(size_t Pos, const TensorSpec &Spec, StringRef Prefix,
                      bool &InputIsPresent) {
    const int Index =
        CompiledModel->LookupArgIndex((Prefix + Spec.name()).str());
    void *Buffer = nullptr;
    InputIsPresent = Index >= 0;
    if (InputIsPresent)
      Buffer = CompiledModel->arg_data(Index);
    setUpBufferForTensor(Pos, Spec, Buffer);
  }
  void *evaluateUntyped() override {
    CompiledModel->Run();
    return CompiledModel->result_data(ResultIndex);
  }

  int32_t ResultIndex = -1;
  std::unique_ptr<TGen> CompiledModel;
};
/// A mock class satisfying the interface expected by ReleaseModeModelRunner;
/// a compile-time stand-in used when no AOT-ed model is embedded.
class NoopSavedModelImpl final {
#define NOOP_MODEL_ERRMSG                                                      \
  "The mock AOT-ed saved model is a compile-time stub and should not be "     \
  "called."

public:
  NoopSavedModelImpl() = default;
  int LookupArgIndex(const std::string &) { llvm_unreachable(NOOP_MODEL_ERRMSG); }
  int LookupResultIndex(const std::string &) { llvm_unreachable(NOOP_MODEL_ERRMSG); }
  void Run() { llvm_unreachable(NOOP_MODEL_ERRMSG); }
  void *result_data(int) { llvm_unreachable(NOOP_MODEL_ERRMSG); }
  void *arg_data(int) { llvm_unreachable(NOOP_MODEL_ERRMSG); }
#undef NOOP_MODEL_ERRMSG
};
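To make the TGen contract concrete, here is a minimal, self-contained sketch. TinyCompiledModel, my_feature, my_decision, and runOnce are invented names used only for illustration; a real TGen is the class emitted by AOT-compiling a saved model, not hand-written code. The sketch shows a model-provided buffer being matched through the default feed_/fetch_ prefixes and evaluated through the MLModelRunner interface.

#include "llvm/Analysis/ReleaseModeModelRunner.h"
#include "llvm/Analysis/TensorSpec.h"
#include "llvm/IR/LLVMContext.h"
#include <string>
#include <vector>

using namespace llvm;

// Hypothetical, hand-written stand-in for an AOT-generated model class. It
// exposes one int64 input ("feed_my_feature") and one int64 output
// ("fetch_my_decision"), matching the default feed_/fetch_ prefixes.
struct TinyCompiledModel {
  int64_t Input = 0;
  int64_t Output = 0;
  int LookupArgIndex(const std::string &Name) {
    return Name == "feed_my_feature" ? 0 : -1;
  }
  int LookupResultIndex(const std::string &Name) {
    return Name == "fetch_my_decision" ? 0 : -1;
  }
  void *arg_data(int Index) { return Index == 0 ? (void *)&Input : nullptr; }
  void *result_data(int Index) { return Index == 0 ? (void *)&Output : nullptr; }
  void Run() { Output = Input > 42; } // stand-in for real inference
};

int64_t runOnce(LLVMContext &Ctx, int64_t FeatureValue) {
  std::vector<TensorSpec> Inputs = {
      TensorSpec::createSpec<int64_t>("my_feature", {1})};
  ReleaseModeModelRunner<TinyCompiledModel> Runner(Ctx, Inputs, "my_decision");
  *Runner.getTensor<int64_t>(0) = FeatureValue; // writes TinyCompiledModel::Input
  return Runner.evaluate<int64_t>();            // Run(), then read the output
}

With the stock NoopSavedModelImpl above, the same lookups would instead hit llvm_unreachable, which is why callers gate runner construction on isEmbeddedModelEvaluatorValid (see below).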
template <class T> bool isEmbeddedModelEvaluatorValid() { return true; }

template <> inline bool isEmbeddedModelEvaluatorValid<NoopSavedModelImpl>() {
  return false;
}
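A minimal sketch of how this trait is typically consumed. CompiledModelType and tryMakeReleaseRunner are hypothetical names: the idea is that a build-time alias resolves to either the AOT-generated class or NoopSavedModelImpl, and callers bail out rather than construct a runner whose methods would hit llvm_unreachable.

// Hypothetical build-time alias: the AOT-generated class when a model is
// embedded, NoopSavedModelImpl otherwise.
using CompiledModelType = NoopSavedModelImpl;

std::unique_ptr<MLModelRunner>
tryMakeReleaseRunner(LLVMContext &Ctx, const std::vector<TensorSpec> &Inputs,
                     StringRef DecisionName) {
  if (!isEmbeddedModelEvaluatorValid<CompiledModelType>())
    return nullptr; // no real model compiled in; fall back to another advisor
  return std::make_unique<ReleaseModeModelRunner<CompiledModelType>>(
      Ctx, Inputs, DecisionName);
}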
struct EmbeddedModelRunnerOptions {
  /// Feed and Fetch feature prefixes - i.e. a feature named "foo" will be
  /// looked up as {FeedPrefix}foo, and an output named "bar" will be looked
  /// up as {FetchPrefix}bar.
  StringRef FeedPrefix = "feed_";
  StringRef FetchPrefix = "fetch_";

  /// ModelSelector is the name (recognized by the AOT-ed model) of a sub-model
  /// to use. The empty string is allowed if the model has no sub-models.
  StringRef ModelSelector = "";

  EmbeddedModelRunnerOptions &setFeedPrefix(StringRef Value) {
    FeedPrefix = Value;
    return *this;
  }
  EmbeddedModelRunnerOptions &setFetchPrefix(StringRef Value) {
    FetchPrefix = Value;
    return *this;
  }
  EmbeddedModelRunnerOptions &setModelSelector(StringRef Value) {
    ModelSelector = Value;
    return *this;
  }
};
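A short configuration sketch. The prefixes shown are just the defaults made explicit, and "O3-variant" is an invented sub-model name that the embedded model would have to recognize; per the constructor logic above, the selector string is hashed with MD5 and fed to the model_selector input.

// Hypothetical helper showing the fluent setters; the result would be passed
// as the last ReleaseModeModelRunner constructor argument.
EmbeddedModelRunnerOptions makeOptions() {
  return EmbeddedModelRunnerOptions()
      .setFeedPrefix("feed_")
      .setFetchPrefix("fetch_")
      .setModelSelector("O3-variant");
}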