// TensorFlow Serving C++ API Documentation
// server_request_logger_test.cc
1 /* Copyright 2017 Google Inc. All Rights Reserved.
2 
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6 
7  http://www.apache.org/licenses/LICENSE-2.0
8 
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
15 
16 #include "tensorflow_serving/core/server_request_logger.h"
17 
18 #include <functional>
19 #include <map>
20 #include <memory>
21 #include <unordered_map>
22 #include <utility>
23 #include <vector>
24 
25 #include "google/protobuf/any.pb.h"
26 #include "google/protobuf/message.h"
27 #include <gmock/gmock.h>
28 #include <gtest/gtest.h>
29 #include "absl/memory/memory.h"
30 #include "absl/status/status.h"
31 #include "absl/strings/str_cat.h"
32 #include "tensorflow/cc/saved_model/tag_constants.h"
33 #include "xla/tsl/lib/core/status_test_util.h"
34 #include "tensorflow/core/lib/core/errors.h"
35 #include "tensorflow/core/lib/core/status.h"
36 #include "tensorflow/core/lib/core/status_test_util.h"
37 #include "tensorflow_serving/apis/logging.pb.h"
38 #include "tensorflow_serving/apis/model.pb.h"
39 #include "tensorflow_serving/apis/predict.pb.h"
40 #include "tensorflow_serving/config/log_collector_config.pb.h"
41 #include "tensorflow_serving/config/logging_config.pb.h"
42 #include "tensorflow_serving/core/log_collector.h"
43 #include "tensorflow_serving/core/request_logger.h"
44 #include "tensorflow_serving/core/test_util/fake_log_collector.h"
45 #include "tensorflow_serving/core/test_util/mock_prediction_stream_logger.h"
46 #include "tensorflow_serving/core/test_util/mock_request_logger.h"
47 #include "tensorflow_serving/test_util/test_util.h"
48 
49 namespace tensorflow {
50 namespace serving {
51 namespace {
52 
53 using test_util::MockPredictionStreamLogger;
54 using ::testing::_;
55 using ::testing::Invoke;
56 using ::testing::NiceMock;
57 
58 LogCollectorConfig CreateLogCollectorConfig(const string& type,
59  const string& filename_prefix) {
60  LogCollectorConfig config;
61  config.set_type(type);
62  config.set_filename_prefix(filename_prefix);
63  return config;
64 }
65 
66 std::pair<string, LoggingConfig> CreateLoggingConfigForModel(
67  const string& model_name, const string& log_filename_suffix = "") {
68  string filename_prefix = absl::StrCat("/file/", model_name);
69  if (!log_filename_suffix.empty()) {
70  absl::StrAppend(&filename_prefix, "-", log_filename_suffix);
71  }
72  LoggingConfig logging_config;
73  *logging_config.mutable_log_collector_config() =
74  CreateLogCollectorConfig("", filename_prefix);
75  logging_config.mutable_sampling_config()->set_sampling_rate(1.0);
76  return {model_name, logging_config};
77 }
78 
79 std::map<string, std::vector<LoggingConfig>> CreateLoggingConfigMap(
80  const std::vector<std::pair<string, LoggingConfig>>& model_configs) {
81  std::map<string, std::vector<LoggingConfig>> config_map;
82  for (const auto& model_config : model_configs) {
83  config_map[model_config.first].push_back(model_config.second);
84  }
85  return config_map;
86 }
87 
// Test fixture that wires a ServerRequestLogger to a factory producing
// NiceMock<MockRequestLogger>s backed by FakeLogCollectors. Tests observe
// behavior through log_collector_map_ and the created/deleted counters.
class ServerRequestLoggerTest : public ::testing::Test {
 protected:
  ServerRequestLoggerTest() {
    TF_CHECK_OK(ServerRequestLogger::Create(
        // Logger-creation callback: invoked by ServerRequestLogger whenever a
        // LoggingConfig needs a fresh RequestLogger.
        [&](const LoggingConfig& logging_config,
            std::shared_ptr<RequestLogger>* const request_logger) {
          const string& filename_prefix =
              logging_config.log_collector_config().filename_prefix();
          // NOTE(review): the FakeLogCollector is never freed here; it appears
          // to be intentionally leaked for the lifetime of the test binary.
          log_collector_map_[filename_prefix] = new FakeLogCollector();
          increment_created_logger_counter();
          // Fired from the mock logger's destructor so tests can detect when
          // Update() tears a logger down.
          auto logger_destruction_notifier = [this]() {
            increment_deleted_logger_counter();
          };
          const std::vector<string>& tags = {kSavedModelTagServe};
          auto mock_request_logger =
              std::shared_ptr<NiceMock<MockRequestLogger>>(
                  new NiceMock<MockRequestLogger>(
                      logging_config, tags, log_collector_map_[filename_prefix],
                      logger_destruction_notifier));
          // Default behavior: emit an empty Any as the log message and return
          // whatever status request_logger_status_cb_ dictates (tests replace
          // the callback to inject errors).
          ON_CALL(*mock_request_logger, CreateLogMessage(_, _, _, _))
              .WillByDefault(Invoke([&](const google::protobuf::Message& actual_request,
                                        const google::protobuf::Message& actual_response,
                                        const LogMetadata& actual_log_metadata,
                                        std::unique_ptr<google::protobuf::Message>* log) {
                *log = std::unique_ptr<google::protobuf::Any>(
                    new google::protobuf::Any());
                return request_logger_status_cb_();
              }));
          // Pass log metadata through unchanged.
          ON_CALL(*mock_request_logger, FillLogMetadata(_))
              .WillByDefault(Invoke([&](const LogMetadata& log_metadata) {
                return log_metadata;
              }));
          *request_logger = std::move(mock_request_logger);
          return OkStatus();
        },
        &server_request_logger_));
  }

  // Counter accessors are guarded by m_ (presumably the factory callback may
  // be invoked concurrently with test-thread reads — hence the lock).
  void increment_created_logger_counter() {
    mutex_lock l(m_);
    created_logger_counter_++;
  }

  int created_logger_counter() const {
    mutex_lock sl(m_);
    return created_logger_counter_;
  }

  void increment_deleted_logger_counter() {
    mutex_lock l(m_);
    deleted_logger_counter_++;
  }

  int deleted_logger_counter() const {
    mutex_lock sl(m_);
    return deleted_logger_counter_;
  }

  mutable mutex m_;
  // Number of RequestLoggers created by the factory callback above.
  int created_logger_counter_ = 0;
  // Number of RequestLoggers destroyed (via logger_destruction_notifier).
  int deleted_logger_counter_ = 0;
  // Status returned by the mocked CreateLogMessage; tests swap this in to
  // simulate logging failures.
  std::function<Status()> request_logger_status_cb_ = []() {
    return OkStatus();
  };
  // filename_prefix -> collector, for inspecting what was logged where.
  std::unordered_map<string, FakeLogCollector*> log_collector_map_;
  std::unique_ptr<ServerRequestLogger> server_request_logger_;
};
155 
156 TEST_F(ServerRequestLoggerTest, Empty) {
157  TF_ASSERT_OK(server_request_logger_->Update({}));
158  TF_ASSERT_OK(server_request_logger_->Log(PredictRequest(), PredictResponse(),
159  LogMetadata()));
160  // No log-collectors should have been made.
161  EXPECT_TRUE(log_collector_map_.empty());
162 }
163 
164 TEST_F(ServerRequestLoggerTest, AbsentModel) {
165  TF_ASSERT_OK(server_request_logger_->Update(
166  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0")})));
167  LogMetadata log_metadata;
168  auto* const model_spec = log_metadata.mutable_model_spec();
169  model_spec->set_name("absent_model");
170  TF_ASSERT_OK(server_request_logger_->Log(PredictRequest(), PredictResponse(),
171  log_metadata));
172  ASSERT_EQ(1, log_collector_map_.size());
173  EXPECT_EQ(0, log_collector_map_["/file/model0"]->collect_count());
174 }
175 
176 TEST_F(ServerRequestLoggerTest, MultipleModels) {
177  TF_ASSERT_OK(server_request_logger_->Update(
178  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0"),
179  CreateLoggingConfigForModel("model1")})));
180 
181  LogMetadata log_metadata0;
182  auto* const model_spec0 = log_metadata0.mutable_model_spec();
183  model_spec0->set_name("model0");
184  TF_ASSERT_OK(server_request_logger_->Log(PredictRequest(), PredictResponse(),
185  log_metadata0));
186  ASSERT_EQ(2, log_collector_map_.size());
187  EXPECT_EQ(1, log_collector_map_["/file/model0"]->collect_count());
188  EXPECT_EQ(0, log_collector_map_["/file/model1"]->collect_count());
189 
190  LogMetadata log_metadata1;
191  auto* const model_spec = log_metadata1.mutable_model_spec();
192  model_spec->set_name("model1");
193  TF_ASSERT_OK(server_request_logger_->Log(PredictRequest(), PredictResponse(),
194  log_metadata1));
195  ASSERT_EQ(2, log_collector_map_.size());
196  EXPECT_EQ(1, log_collector_map_["/file/model0"]->collect_count());
197  EXPECT_EQ(1, log_collector_map_["/file/model1"]->collect_count());
198 }
199 
200 TEST_F(ServerRequestLoggerTest, CreateAndDeleteLogger) {
201  auto model_logging_configs =
202  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0")});
203  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
204  EXPECT_EQ(1, created_logger_counter());
205  EXPECT_EQ(0, deleted_logger_counter());
206 
207  model_logging_configs.clear();
208  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
209  EXPECT_EQ(1, created_logger_counter());
210  EXPECT_EQ(1, deleted_logger_counter());
211 }
212 
213 TEST_F(ServerRequestLoggerTest, CreateAndModifyLogger) {
214  auto model_logging_configs =
215  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0")});
216  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
217  EXPECT_EQ(1, created_logger_counter());
218  EXPECT_EQ(0, deleted_logger_counter());
219 
220  model_logging_configs["model0"][0]
221  .mutable_sampling_config()
222  ->set_sampling_rate(0.17);
223 
224  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
225  EXPECT_EQ(2, created_logger_counter());
226  EXPECT_EQ(1, deleted_logger_counter());
227 }
228 
229 TEST_F(ServerRequestLoggerTest, SameConfigForTwoModelsCreatesOneLogger) {
230  std::pair<string, LoggingConfig> model_and_config1 =
231  CreateLoggingConfigForModel("model");
232  std::pair<string, LoggingConfig> model_and_config2 = {
233  "model2", model_and_config1.second};
234  TF_ASSERT_OK(server_request_logger_->Update(
235  CreateLoggingConfigMap({model_and_config1, model_and_config2})));
236 
237  EXPECT_EQ(1, created_logger_counter());
238  EXPECT_EQ(0, deleted_logger_counter());
239 }
240 
241 TEST_F(ServerRequestLoggerTest, MultipleConfigForOneModel) {
242  TF_ASSERT_OK(server_request_logger_->Update(CreateLoggingConfigMap(
243  {CreateLoggingConfigForModel("model0"),
244  CreateLoggingConfigForModel("model0", "infra")})));
245  EXPECT_EQ(2, created_logger_counter());
246  EXPECT_EQ(0, deleted_logger_counter());
247 }
248 
249 TEST_F(ServerRequestLoggerTest, MultipleLoggersForOneModel) {
250  TF_ASSERT_OK(server_request_logger_->Update(CreateLoggingConfigMap(
251  {CreateLoggingConfigForModel("model0"),
252  CreateLoggingConfigForModel("model0", "infra")})));
253 
254  LogMetadata log_metadata0;
255  auto* const model_spec0 = log_metadata0.mutable_model_spec();
256  model_spec0->set_name("model0");
257  TF_ASSERT_OK(server_request_logger_->Log(PredictRequest(), PredictResponse(),
258  log_metadata0));
259  ASSERT_EQ(2, log_collector_map_.size());
260  EXPECT_EQ(1, log_collector_map_["/file/model0"]->collect_count());
261  EXPECT_EQ(1, log_collector_map_["/file/model0-infra"]->collect_count());
262 
263  LogMetadata log_metadata1;
264  auto* const model_spec = log_metadata1.mutable_model_spec();
265  model_spec->set_name("model1");
266  TF_ASSERT_OK(server_request_logger_->Log(PredictRequest(), PredictResponse(),
267  log_metadata1));
268  ASSERT_EQ(2, log_collector_map_.size());
269  EXPECT_EQ(1, log_collector_map_["/file/model0"]->collect_count());
270  EXPECT_EQ(1, log_collector_map_["/file/model0-infra"]->collect_count());
271  EXPECT_EQ(log_collector_map_.end(), log_collector_map_.find("/file/model1"));
272 }
273 
274 TEST_F(ServerRequestLoggerTest, MultipleLoggersOneModelErrors) {
275  TF_ASSERT_OK(server_request_logger_->Update(CreateLoggingConfigMap(
276  {CreateLoggingConfigForModel("model0"),
277  CreateLoggingConfigForModel("model0", "infra")})));
278 
279  // Inject errors for all Log() calls.
280  int req_count = 0;
281  request_logger_status_cb_ = [&]() {
282  return errors::InvalidArgument(absl::StrCat(req_count++));
283  };
284 
285  LogMetadata log_metadata0;
286  auto* const model_spec0 = log_metadata0.mutable_model_spec();
287  model_spec0->set_name("model0");
288  EXPECT_EQ(errors::InvalidArgument("0"),
289  server_request_logger_->Log(PredictRequest(), PredictResponse(),
290  log_metadata0));
291 
292  ASSERT_EQ(2, log_collector_map_.size());
293  EXPECT_EQ(0, log_collector_map_["/file/model0"]->collect_count());
294  EXPECT_EQ(0, log_collector_map_["/file/model0-infra"]->collect_count());
295 }
296 
297 TEST_F(ServerRequestLoggerTest, MultipleUpdatesSingleCreation) {
298  const auto& model_logging_configs =
299  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0"),
300  CreateLoggingConfigForModel("model1")});
301  for (int i = 0; i < 100; i++) {
302  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
303  }
304 
305  EXPECT_EQ(2, created_logger_counter());
306  EXPECT_EQ(0, deleted_logger_counter());
307 }
308 
// Starts a logging stream for a model with two logging configs and checks
// that a single LogMessage() call fans out to both collectors.
TEST_F(ServerRequestLoggerTest, StreamLoggingBasic) {
  auto model_logging_configs =
      CreateLoggingConfigMap({CreateLoggingConfigForModel("model0", "file1"),
                              CreateLoggingConfigForModel("model0", "file2")});

  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
  LogMetadata log_metadata;
  log_metadata.mutable_model_spec()->set_name("model0");
  // Ownership passes to the stream via the factory lambda below; the raw
  // pointer is kept only to set expectations on the mock.
  auto* logger_ptr = new MockPredictionStreamLogger();
  auto logger = server_request_logger_
                    ->StartLoggingStream<PredictRequest, PredictResponse>(
                        log_metadata, [logger_ptr]() {
                          return absl::WrapUnique(logger_ptr);
                        });
  // Expected twice: once per logging config attached to "model0".
  EXPECT_CALL(*logger_ptr, CreateLogMessage(_, _))
      .Times(2)
      .WillRepeatedly(Invoke([](const LogMetadata& log_metadata,
                                std::unique_ptr<google::protobuf::Message>* log) {
        *log = std::make_unique<google::protobuf::Any>();
        return absl::OkStatus();
      }));
  TF_ASSERT_OK(logger->LogMessage());
  ASSERT_EQ(2, log_collector_map_.size());
  EXPECT_EQ(1, log_collector_map_["/file/model0-file1"]->collect_count());
  EXPECT_EQ(1, log_collector_map_["/file/model0-file2"]->collect_count());
}
335 
336 TEST_F(ServerRequestLoggerTest, StreamLoggingUpdateLoggingConfig) {
337  auto model_logging_configs =
338  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0", "file1"),
339  CreateLoggingConfigForModel("model0", "file2")});
340 
341  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
342  LogMetadata log_metadata;
343  log_metadata.mutable_model_spec()->set_name("model0");
344  auto* logger_ptr = new MockPredictionStreamLogger();
345  auto logger = server_request_logger_->StartLoggingStream<PredictRequest,
346  PredictResponse>(
347  log_metadata, [logger_ptr]() {
348  return std::unique_ptr<StreamLogger<PredictRequest, PredictResponse>>(
349  logger_ptr);
350  });
351 
352  model_logging_configs =
353  CreateLoggingConfigMap({CreateLoggingConfigForModel("model0", "file2"),
354  CreateLoggingConfigForModel("model0", "file3")});
355 
356  // Updates to a new logging config. Since the stream hasn't finished, the
357  // stream logger will not use the new config.
358  TF_ASSERT_OK(server_request_logger_->Update(model_logging_configs));
359  EXPECT_CALL(*logger_ptr, CreateLogMessage(_, _))
360  .Times(2)
361  .WillRepeatedly(Invoke([](const LogMetadata& log_metadata,
362  std::unique_ptr<google::protobuf::Message>* log) {
363  *log =
364  std::unique_ptr<google::protobuf::Any>(new google::protobuf::Any());
365  return absl::OkStatus();
366  }));
367  TF_ASSERT_OK(logger->LogMessage());
368  ASSERT_EQ(3, log_collector_map_.size());
369  EXPECT_EQ(1, log_collector_map_["/file/model0-file2"]->collect_count());
370  EXPECT_EQ(0, log_collector_map_["/file/model0-file3"]->collect_count());
371 }
372 
373 } // namespace
374 } // namespace serving
375 } // namespace tensorflow