#ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_MULTI_INFERENCE_H_
#define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_MULTI_INFERENCE_H_

#include "absl/types/optional.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/tfrt/saved_model/saved_model.h"
#include "tensorflow_serving/apis/inference.pb.h"
#include "tensorflow_serving/model_servers/server_core.h"

namespace tensorflow {
29 Status RunMultiInference(
const tfrt::SavedModel::RunOptions& run_options,
30 const absl::optional<int64_t>& servable_version,
31 tfrt::SavedModel* saved_model,
32 const MultiInferenceRequest& request,
33 MultiInferenceResponse* response);