@@ -18,14 +18,14 @@ limitations under the License.
 #include <vector>

 #include "absl/container/flat_hash_map.h"
-#include "tensorflow/core/platform/logging.h"
 #include "tensorflow/lite/c/common.h"
 #include "tensorflow/lite/tools/command_line_flags.h"
 #include "tensorflow/lite/tools/evaluation/evaluation_delegate_provider.h"
 #include "tensorflow/lite/tools/evaluation/proto/evaluation_config.pb.h"
 #include "tensorflow/lite/tools/evaluation/proto/evaluation_stages.pb.h"
 #include "tensorflow/lite/tools/evaluation/stages/object_detection_stage.h"
 #include "tensorflow/lite/tools/evaluation/utils.h"
+#include "tensorflow/lite/tools/logging.h"

 namespace tflite {
 namespace evaluation {
@@ -62,7 +62,7 @@ bool EvaluateModel(const std::string& model_file_path,
   inference_params->set_delegate(ParseStringToDelegateType(delegate));
   if (!delegate.empty() &&
       inference_params->delegate() == TfliteInferenceParams::NONE) {
-    LOG(WARNING) << "Unsupported TFLite delegate: " << delegate;
+    TFLITE_LOG(WARN) << "Unsupported TFLite delegate: " << delegate;
     return false;
   }

@@ -77,14 +77,10 @@ bool EvaluateModel(const std::string& model_file_path,
   eval.SetAllLabels(model_labels);
   if (eval.Init(&delegate_providers) != kTfLiteOk) return false;

-  // Open output file for writing.
-  std::ofstream ofile;
-  ofile.open(output_file_path, std::ios::out);
-
   const int step = image_paths.size() / 100;
   for (int i = 0; i < image_paths.size(); ++i) {
     if (step > 1 && i % step == 0) {
-      LOG(INFO) << "Finished: " << i / step << "%";
+      TFLITE_LOG(INFO) << "Finished: " << i / step << "%";
     }

     const std::string image_name = GetNameFromPath(image_paths[i]);
@@ -93,22 +89,65 @@ bool EvaluateModel(const std::string& model_file_path,

     if (debug_mode) {
       ObjectDetectionResult prediction = *eval.GetLatestPrediction();
-      prediction.set_image_name(image_name);
-      ofile << prediction.DebugString();
-      ofile << "======================================================\n";
+      TFLITE_LOG(INFO) << "Image: " << image_name << "\n";
+      for (int i = 0; i < prediction.objects_size(); ++i) {
+        const auto& object = prediction.objects(i);
+        TFLITE_LOG(INFO) << "Object [" << i << "]";
+        TFLITE_LOG(INFO) << "  Score: " << object.score();
+        TFLITE_LOG(INFO) << "  Class-ID: " << object.class_id();
+        TFLITE_LOG(INFO) << "  Bounding Box:";
+        const auto& bounding_box = object.bounding_box();
+        TFLITE_LOG(INFO) << "    Normalized Top: "
+                         << bounding_box.normalized_top();
+        TFLITE_LOG(INFO) << "    Normalized Bottom: "
+                         << bounding_box.normalized_bottom();
+        TFLITE_LOG(INFO) << "    Normalized Left: "
+                         << bounding_box.normalized_left();
+        TFLITE_LOG(INFO) << "    Normalized Right: "
+                         << bounding_box.normalized_right();
+      }
+      TFLITE_LOG(INFO)
+          << "======================================================\n";
     }
   }

   // Write metrics to file.
-  EvaluationStageMetrics metrics = eval.LatestMetrics();
+  EvaluationStageMetrics latest_metrics = eval.LatestMetrics();
   if (ground_truth_proto_file.empty()) {
     // mAP metrics are meaningless for no ground truth.
-    metrics.mutable_process_metrics()
+    latest_metrics.mutable_process_metrics()
         ->mutable_object_detection_metrics()
         ->clear_average_precision_metrics();
   }
-  ofile << metrics.DebugString();
-  ofile.close();
+  if (!output_file_path.empty()) {
+    std::ofstream metrics_ofile;
+    metrics_ofile.open(output_file_path, std::ios::out);
+    metrics_ofile << latest_metrics.SerializeAsString();
+    metrics_ofile.close();
+  }
+  TFLITE_LOG(INFO) << "Num evaluation runs: " << latest_metrics.num_runs();
+  const auto object_detection_metrics =
+      latest_metrics.process_metrics().object_detection_metrics();
+  const auto& preprocessing_latency =
+      object_detection_metrics.pre_processing_latency();
+  TFLITE_LOG(INFO) << "Preprocessing latency: avg="
+                   << preprocessing_latency.avg_us() << "(us), std_dev="
+                   << preprocessing_latency.std_deviation_us() << "(us)";
+  const auto& inference_latency = object_detection_metrics.inference_latency();
+  TFLITE_LOG(INFO) << "Inference latency: avg=" << inference_latency.avg_us()
+                   << "(us), std_dev=" << inference_latency.std_deviation_us()
+                   << "(us)";
+  const auto& precision_metrics =
+      object_detection_metrics.average_precision_metrics();
+  for (int i = 0; i < precision_metrics.individual_average_precisions_size();
+       ++i) {
+    const auto ap_metric = precision_metrics.individual_average_precisions(i);
+    TFLITE_LOG(INFO) << "Average Precision [IOU Threshold="
+                     << ap_metric.iou_threshold()
+                     << "]: " << ap_metric.average_precision();
+  }
+  TFLITE_LOG(INFO) << "Overall mAP: "
+                   << precision_metrics.overall_mean_average_precision();

   return true;
 }
@@ -167,14 +206,14 @@ int Main(int argc, char* argv[]) {

   std::vector<std::string> model_labels;
   if (!ReadFileLines(model_output_labels_path, &model_labels)) {
-    LOG(ERROR) << "Could not read model output labels file";
+    TFLITE_LOG(ERROR) << "Could not read model output labels file";
     return EXIT_FAILURE;
   }

   if (!EvaluateModel(model_file_path, model_labels, image_paths,
                      ground_truth_proto_file, delegate, output_file_path,
                      num_interpreter_threads, debug_mode, delegate_providers)) {
-    LOG(ERROR) << "Could not evaluate model";
+    TFLITE_LOG(ERROR) << "Could not evaluate model";
     return EXIT_FAILURE;
   }

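Note on consuming the new output: with this change the metrics file is written via SerializeAsString() rather than DebugString(), so it now contains a binary-serialized tflite::evaluation::EvaluationStageMetrics proto instead of human-readable text. A minimal sketch of a reader follows (hypothetical, not part of this change), assuming the generated proto header from tensorflow/lite/tools/evaluation/proto is available on the include path.

// read_metrics.cc -- hypothetical helper, not part of the commit above.
// Parses the binary EvaluationStageMetrics proto written by run_eval and
// prints it in the old human-readable DebugString() form.
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

#include "tensorflow/lite/tools/evaluation/proto/evaluation_stages.pb.h"

int main(int argc, char* argv[]) {
  if (argc < 2) {
    std::cerr << "Usage: read_metrics <metrics_file>\n";
    return 1;
  }
  std::ifstream in(argv[1], std::ios::binary);
  std::stringstream buffer;
  buffer << in.rdbuf();  // Slurp the whole file into memory.
  tflite::evaluation::EvaluationStageMetrics metrics;
  if (!metrics.ParseFromString(buffer.str())) {
    std::cerr << "Could not parse EvaluationStageMetrics proto\n";
    return 1;
  }
  std::cout << metrics.DebugString();
  return 0;
}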