TensorFlow Serving C++ API documentation
Source file: server_request_logger.cc
1 /* Copyright 2016 Google Inc. All Rights Reserved.
2 
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6 
7  http://www.apache.org/licenses/LICENSE-2.0
8 
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
15 
16 #include "tensorflow_serving/core/server_request_logger.h"
17 
18 #include <functional>
19 #include <map>
20 #include <memory>
21 #include <utility>
22 #include <vector>
23 
24 #include "tensorflow/core/lib/core/errors.h"
25 #include "tensorflow/core/lib/gtl/map_util.h"
26 #include "tensorflow/core/lib/strings/proto_serialization.h"
27 #include "tensorflow/core/platform/macros.h"
28 #include "tensorflow/core/platform/status.h"
29 #include "tsl/platform/errors.h"
30 #include "tensorflow_serving/apis/logging.pb.h"
31 #include "tensorflow_serving/apis/model.pb.h"
32 #include "tensorflow_serving/core/request_logger.h"
33 
34 namespace tensorflow {
35 namespace serving {
36 
37 // static
38 Status ServerRequestLogger::Create(
39  LoggerCreator request_logger_creator,
40  std::unique_ptr<ServerRequestLogger>* server_request_logger) {
41  server_request_logger->reset(
42  new ServerRequestLogger(std::move(request_logger_creator)));
43  return OkStatus();
44 }
45 
// Stores the factory callback used later (by Update(), via
// FindOrCreateLogger()) to build a RequestLogger for each logging config
// that has not been seen before.
ServerRequestLogger::ServerRequestLogger(LoggerCreator request_logger_creator)
    : request_logger_creator_(std::move(request_logger_creator)) {}
48 
49 Status ServerRequestLogger::FindOrCreateLogger(
50  const LoggingConfig& config,
51  StringToUniqueRequestLoggerMap* new_config_to_logger_map,
52  std::shared_ptr<RequestLogger>* result) {
53  string serialized_config;
54  if (!SerializeToStringDeterministic(config, &serialized_config)) {
55  return errors::InvalidArgument("Cannot serialize config.");
56  }
57 
58  auto find_new_it = new_config_to_logger_map->find(serialized_config);
59  if (find_new_it != new_config_to_logger_map->end()) {
60  // The logger is already in new_config_to_logger_map, simply return it.
61  *result = find_new_it->second;
62  return OkStatus();
63  }
64 
65  auto find_old_it = config_to_logger_map_.find(serialized_config);
66  if (find_old_it != config_to_logger_map_.end()) {
67  // The logger is in old_config_to_logger_map. Move it to
68  // new_config_to_logger_map, erase the entry in config_to_logger_map_ and
69  // return the logger.
70  *result = find_old_it->second;
71  new_config_to_logger_map->emplace(
72  std::make_pair(serialized_config, std::move(find_old_it->second)));
73  config_to_logger_map_.erase(find_old_it);
74  return OkStatus();
75  }
76 
77  // The logger does not exist. Create a new logger, insert it into
78  // new_config_to_logger_map and return it.
79  TF_RETURN_IF_ERROR(request_logger_creator_(config, result));
80  new_config_to_logger_map->emplace(std::make_pair(serialized_config, *result));
81  return OkStatus();
82 }
83 
84 Status ServerRequestLogger::Update(
85  const std::map<string, std::vector<LoggingConfig>>& logging_config_map) {
86  if (!logging_config_map.empty() && !request_logger_creator_) {
87  return errors::InvalidArgument("No request-logger-creator provided.");
88  }
89 
90  // Those new maps will only contain loggers from logging_config_map and
91  // replace the current versions further down.
92  std::unique_ptr<StringToRequestLoggersMap> new_model_to_loggers_map(
93  new StringToRequestLoggersMap());
94  StringToUniqueRequestLoggerMap new_config_to_logger_map;
95 
96  mutex_lock l(update_mu_);
97 
98  for (const auto& model_and_logging_config : logging_config_map) {
99  for (const auto& logging_config : model_and_logging_config.second) {
100  std::shared_ptr<RequestLogger> logger;
101  TF_RETURN_IF_ERROR(FindOrCreateLogger(
102  logging_config, &new_config_to_logger_map, &logger));
103  const string& model_name = model_and_logging_config.first;
104  (*new_model_to_loggers_map)[model_name].push_back(logger);
105  }
106  }
107 
108  model_to_loggers_map_.Update(std::move(new_model_to_loggers_map));
109  // Any remaining loggers in config_to_logger_map_ will not be needed anymore
110  // and destructed at this point.
111  config_to_logger_map_ = std::move(new_config_to_logger_map);
112 
113  return OkStatus();
114 }
115 
116 Status ServerRequestLogger::Log(const google::protobuf::Message& request,
117  const google::protobuf::Message& response,
118  const LogMetadata& log_metadata) {
119  Status status;
120  InvokeLoggerForModel(
121  log_metadata, [&status, &request, &response, &log_metadata](
122  const std::shared_ptr<RequestLogger>& logger) {
123  // Note: Only first error will be tracked/returned.
124  status.Update(logger->Log(request, response, log_metadata));
125  });
126  return status;
127 }
128 
129 void ServerRequestLogger::InvokeLoggerForModel(
130  const LogMetadata& log_metadata,
131  std::function<void(const std::shared_ptr<RequestLogger>&)> fn) {
132  const string& model_name = log_metadata.model_spec().name();
133  auto model_to_loggers_map = model_to_loggers_map_.get();
134  if (!model_to_loggers_map || model_to_loggers_map->empty()) {
135  VLOG(2) << "Request loggers map is empty.";
136  return;
137  }
138  auto found_it = model_to_loggers_map->find(model_name);
139  if (found_it == model_to_loggers_map->end()) {
140  VLOG(2) << "Cannot find request-loggers for model: " << model_name;
141  return;
142  }
143  for (const auto& logger : found_it->second) {
144  fn(logger);
145  }
146 }
147 
148 } // namespace serving
149 } // namespace tensorflow