#ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_BUNDLE_FACTORY_H_
#define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_BUNDLE_FACTORY_H_
#include "absl/types/optional.h"
#include "tensorflow/cc/saved_model/loader.h"
#include "tensorflow/core/kernels/batching_util/shared_batch_scheduler.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow_serving/batching/batching_session.h"
#include "tensorflow_serving/core/loader.h"
#include "tensorflow_serving/resources/resources.pb.h"
#include "tensorflow_serving/servables/tensorflow/session_bundle_config.pb.h"
namespace tensorflow {
namespace serving {

/// A factory that creates SavedModelBundles from SavedModel export paths.
///
/// If the config calls for batching, the emitted sessions automatically batch
/// Run() calls behind the scenes, using a SharedBatchScheduler owned by the
/// factory.
///
/// This class is thread-safe.
class SavedModelBundleFactory {
 public:
  /// Instantiates a SavedModelBundleFactory using a config.
  ///
  /// @param config   Config with initialization options.
  /// @param factory  Newly created factory if the returned Status is OK.
  static Status Create(const SessionBundleConfig& config,
                       std::unique_ptr<SavedModelBundleFactory>* factory);
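  // Illustrative sketch of constructing a factory (not part of this header;
  // a default-constructed config is shown only for brevity):
  //
  //   SessionBundleConfig config;
  //   std::unique_ptr<SavedModelBundleFactory> factory;
  //   TF_RETURN_IF_ERROR(SavedModelBundleFactory::Create(config, &factory));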
  /// Instantiates a bundle from a given export or SavedModel path.
  ///
  /// @param path    Path to the model.
  /// @param bundle  Newly created SavedModelBundle if the returned Status is OK.
  Status CreateSavedModelBundle(const string& path,
                                std::unique_ptr<SavedModelBundle>* bundle);
  /// Instantiates a bundle from a given export or SavedModel path and the
  /// given metadata.
  ///
  /// @param metadata  Metadata to be associated with the bundle.
  /// @param path      Path to the model.
  /// @param bundle    Newly created SavedModelBundle if the returned Status is OK.
  Status CreateSavedModelBundleWithMetadata(
      const Loader::Metadata& metadata, const string& path,
      std::unique_ptr<SavedModelBundle>* bundle);
  /// Estimates the resources a SavedModel bundle will use once loaded, from
  /// its export path.
  ///
  /// @param path      Path to the model.
  /// @param estimate  Output resource usage estimate.
  Status EstimateResourceRequirement(const string& path,
                                     ResourceAllocation* estimate) const;
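  // Illustrative sketch of loading a bundle and estimating its resource
  // footprint (not part of this header; "/path/to/saved_model" is a
  // placeholder and error handling is abbreviated):
  //
  //   std::unique_ptr<SavedModelBundle> bundle;
  //   TF_RETURN_IF_ERROR(
  //       factory->CreateSavedModelBundle("/path/to/saved_model", &bundle));
  //
  //   ResourceAllocation estimate;
  //   TF_RETURN_IF_ERROR(
  //       factory->EstimateResourceRequirement("/path/to/saved_model",
  //                                            &estimate));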
  const SessionBundleConfig& config() const { return config_; }
  SessionBundleConfig& mutable_config() { return config_; }
 private:
  using Batcher = SharedBatchScheduler<BatchingSessionTask>;

  SavedModelBundleFactory(const SessionBundleConfig& config,
                          std::shared_ptr<Batcher> batch_scheduler);
  Status InternalCreateSavedModelBundle(
      const absl::optional<Loader::Metadata>& metadata, const string& path,
      std::unique_ptr<SavedModelBundle>* bundle);
  SessionBundleConfig config_;

  // Shared across all sessions this factory emits when batching is enabled.
  std::shared_ptr<Batcher> batch_scheduler_;

  TF_DISALLOW_COPY_AND_ASSIGN(SavedModelBundleFactory);
};

}  // namespace serving
}  // namespace tensorflow

#endif  // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_BUNDLE_FACTORY_H_