#ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_MULTI_INFERENCE_HELPER_H_
#define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_MULTI_INFERENCE_HELPER_H_

#include "absl/types/optional.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/threadpool_options.h"
#include "tensorflow_serving/apis/inference.pb.h"
#include "tensorflow_serving/model_servers/server_core.h"
namespace tensorflow {
namespace serving {
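
// Runs the inference tasks in 'request' against the model(s) managed by
// 'core' and fills in 'response'. 'thread_pool_options' optionally supplies
// intra-/inter-op thread pools for the underlying Session::Run calls.
//
// A minimal usage sketch (illustrative only; 'server_core' and the model
// name "my_model" are assumptions about the caller's setup):
//
//   MultiInferenceRequest req;
//   auto* task = req.add_tasks();
//   task->mutable_model_spec()->set_name("my_model");
//   task->set_method_name("tensorflow/serving/classify");
//   MultiInferenceResponse response;
//   TF_RETURN_IF_ERROR(RunMultiInferenceWithServerCore(
//       RunOptions(), server_core.get(), thread::ThreadPoolOptions(),
//       req, &response));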
Status RunMultiInferenceWithServerCore(
    const RunOptions& run_options, ServerCore* core,
    const thread::ThreadPoolOptions& thread_pool_options,
    const MultiInferenceRequest& request, MultiInferenceResponse* response);
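
// Like RunMultiInferenceWithServerCore(), but uses 'model_spec' to override
// the model spec carried in the tasks of 'request'.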
Status RunMultiInferenceWithServerCoreWithModelSpec(
    const RunOptions& run_options, ServerCore* core,
    const thread::ThreadPoolOptions& thread_pool_options,
    const ModelSpec& model_spec, const MultiInferenceRequest& request,
    MultiInferenceResponse* response);

}  // namespace serving
}  // namespace tensorflow

#endif  // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_MULTI_INFERENCE_HELPER_H_