/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include <errno.h>
#include <jni.h>
#include <stdio.h>
#include <sys/stat.h>
#include <unistd.h>

#include <fstream>
#include <sstream>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

#include "tensorflow/lite/tools/benchmark/benchmark_tflite_model.h"

#ifdef __ANDROID__
#include <android/log.h>
#endif

namespace tflite {
namespace benchmark {
namespace {

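// Output locations on the test device: benchmark results are written as CSV
// under kOutputDir, and GPU delegate serialization data (scenarios 11 and 12)
// is written under kSerializeDir.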
const char kOutputDir[] = "/sdcard/benchmark_output";
const char kSerializeDir[] = "/sdcard/serialize";

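// Creates the directory at |path| if it does not already exist. Returns false
// if mkdir fails (other than with EEXIST) or if the path exists but is not a
// directory.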
bool CreateDir(const char* path) {
  struct stat st;
  if (stat(path, &st) != 0) {
    if (mkdir(path, 0777) != 0 && errno != EEXIST) {
      return false;
    }
  } else if (!S_ISDIR(st.st_mode)) {
    errno = ENOTDIR;
    return false;
  }
  return true;
}

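// Reports benchmark results to Firebase Test Lab by writing a JSON record to
// the game-loop results file descriptor, and mirrors the report to the local
// log (logcat on Android, stderr otherwise).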
class FirebaseReportingListener : public BenchmarkListener {
 public:
  explicit FirebaseReportingListener(std::string tag, int report_fd)
      : tag_(tag), report_fd_(report_fd) {
    if (report_fd < 0) {
#ifdef __ANDROID__
      __android_log_print(
          ANDROID_LOG_ERROR, "tflite",
          "Report would be streamed only to local log not to Firebase "
          "since the Firebase log file is not opened.");
#else
      fprintf(stderr,
              "Report would be streamed only to local log not to Firebase "
              "since the Firebase log file is not opened.");
#endif
    }
  }

  void OnBenchmarkEnd(const BenchmarkResults& results) override {
    ReportResult(results);
  }

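  // Reports a failed benchmark run to Firebase with a human-readable status
  // string in place of results.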
  void ReportFailure(TfLiteStatus status) {
    std::string status_msg =
        status == kTfLiteError
            ? "TFLite error"
            : (status == kTfLiteDelegateError ? "TFLite delegate error"
                                              : "Unknown error code");
    Report(status_msg, std::vector<std::pair<std::string, std::string>>());
  }

 private:
  void Report(
      const std::string& status,
      const std::vector<std::pair<std::string, std::string>>& contents) {
    // The output format of the Firebase Game Loop test is JSON.
    // https://firebase.google.com/docs/test-lab/android/game-loop#output-example
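    // For example, a successful run produces a record along these lines
    // (values are illustrative):
    //   {
    //     "name": "TFLite benchmark",
    //     "benchmark config": "cpu_1thread",
    //     "status": "OK",
    //     "average time in us": "init: 1234, warmup: 5678, inference: 4321",
    //     "overall memory usage": "..."
    //   }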
    std::stringstream report;
    report << "{\n"
           << "  \"name\": \"TFLite benchmark\",\n"
           << "  \"benchmark config\": \"" << tag_ << "\",\n"
           << "  \"status\": \"" << status << "\"";
    for (const auto& content : contents) {
      report << ",\n"
             << "  \"" << content.first << "\": \"" << content.second << "\"";
    }
    report << "\n}\n";

    auto report_str = report.str();
    if (report_fd_ >= 0) {
      write(report_fd_, report_str.c_str(), report_str.size());
    }

#ifdef __ANDROID__
    __android_log_print(ANDROID_LOG_ERROR, "tflite", "%s", report_str.c_str());
#else
    fprintf(stderr, "%s", report_str.c_str());
#endif
  }

  void ReportResult(const BenchmarkResults& results) {
    std::vector<std::pair<std::string, std::string>> contents;
    std::stringstream avg_time;
    avg_time << "init: " << results.startup_latency_us() << ", "
             << "warmup: " << results.warmup_time_us().avg() << ", "
             << "inference: " << results.inference_time_us().avg();
    contents.emplace_back("average time in us", avg_time.str());
    std::stringstream overall_mem_usage;
    overall_mem_usage << results.overall_mem_usage();
    contents.emplace_back("overall memory usage", overall_mem_usage.str());

    Report("OK", contents);
  }

  std::string tag_;
  int report_fd_;
};

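// Writes the benchmark results as a single-row CSV file under kOutputDir,
// named after the scenario tag.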
class CsvExportingListener : public BenchmarkListener {
 public:
  explicit CsvExportingListener(std::string tag) : tag_(tag) {}

  void OnBenchmarkEnd(const BenchmarkResults& results) override {
    if (!CreateDir(kOutputDir)) {
#ifdef __ANDROID__
      __android_log_print(ANDROID_LOG_ERROR, "tflite",
                          "Failed to create output directory %s.", kOutputDir);
#else
      fprintf(stderr, "Failed to create output directory %s.", kOutputDir);
#endif
      return;
    }
    WriteBenchmarkResultCsv(results);
  }

 private:
  void WriteBenchmarkResultCsv(const BenchmarkResults& results) {
    auto init_us = results.startup_latency_us();
    auto warmup_us = results.warmup_time_us();
    auto inference_us = results.inference_time_us();
    auto init_mem_usage = results.init_mem_usage();
    auto overall_mem_usage = results.overall_mem_usage();

    std::stringstream file_name;
    file_name << kOutputDir << "/benchmark_result_" << tag_;

    std::ofstream file;
    file.open(file_name.str().c_str());
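    // One header row followed by one data row. Timing columns are in
    // microseconds; memory columns are converted to MB (max RSS from KB,
    // allocation counters from bytes).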
    file << "config_key,model_size,init_time,"
         << "warmup_avg,warmup_min,warmup_max,warmup_stddev,"
         << "inference_avg,inference_min,inference_max,inference_stddev,"
         << "init_max_rss,init_total_alloc,init_in_use_alloc,"
         << "overall_max_rss,overall_total_alloc,overall_in_use_alloc\n";
    file << tag_ << "," << results.model_size_mb() << "," << init_us << ","
         << warmup_us.avg() << "," << warmup_us.min() << "," << warmup_us.max()
         << "," << warmup_us.std_deviation() << "," << inference_us.avg() << ","
         << inference_us.min() << "," << inference_us.max() << ","
         << inference_us.std_deviation() << ","
         << (init_mem_usage.max_rss_kb / 1024.0) << ","
         << (init_mem_usage.total_allocated_bytes / 1024.0 / 1024.0) << ","
         << (init_mem_usage.in_use_allocated_bytes / 1024.0 / 1024.0) << ","
         << (overall_mem_usage.max_rss_kb / 1024.0) << ","
         << (overall_mem_usage.total_allocated_bytes / 1024.0 / 1024.0) << ","
         << (overall_mem_usage.in_use_allocated_bytes / 1024.0 / 1024.0)
         << "\n";
    file.close();
  }

  std::string tag_;
};

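// Returns the tag for the given scenario and appends the corresponding
// benchmark flags to |args|. Returns an empty tag for unknown scenarios.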
std::string GetScenarioConfig(const std::string& library_dir, int scenario,
                              std::vector<std::string>& args) {
  // The number of scenarios should equal the value specified in the
  // AndroidManifest.xml file.
  std::unordered_map<int, std::pair<std::string, std::vector<std::string>>>
      all_scenarios = {
          {1, {"cpu_1thread", {"--num_threads=1"}}},
          {2, {"cpu_2threads", {"--num_threads=2"}}},
          {3, {"cpu_4threads", {"--num_threads=4"}}},
          {4, {"xnnpack_1thread", {"--use_xnnpack=true", "--num_threads=1"}}},
          {5, {"xnnpack_2threads", {"--use_xnnpack=true", "--num_threads=2"}}},
          {6, {"xnnpack_4threads", {"--use_xnnpack=true", "--num_threads=4"}}},
          {7,
           {"gpu_default",
            {"--use_gpu=true", "--gpu_precision_loss_allowed=false"}}},
          {8,
           {"gpu_fp16",
            {"--use_gpu=true", "--gpu_precision_loss_allowed=true"}}},
          {9, {"dsp_hexagon", {"--use_hexagon=true"}}},
          {10, {"nnapi", {"--use_nnapi=true"}}},
          {11,
           {"gpu_default_with_serialization",
            {"--use_gpu=true", "--gpu_precision_loss_allowed=false",
             "--delegate_serialize_token=dummy_token"}}},
          {12,
           {"gpu_fp16_with_serialization",
            {"--use_gpu=true", "--gpu_precision_loss_allowed=true",
             "--delegate_serialize_token=dummy_token"}}},
      };

  std::string tag;
  args.emplace_back("(BenchmarkModelAndroid)");
  args.emplace_back("--graph=/data/local/tmp/graph");

  auto it = all_scenarios.find(scenario);
  if (it != all_scenarios.end()) {
    const auto& scenario_info = it->second;
    tag = scenario_info.first;
    for (const auto& arg : scenario_info.second) {
      args.push_back(arg);
    }
  }
  if (scenario == 9) {
    std::stringstream hexagon_lib_path;
    hexagon_lib_path << "--hexagon_lib_path=" << library_dir;
    args.push_back(hexagon_lib_path.str());
  }

  if (scenario == 11 || scenario == 12) {
    if (CreateDir(kSerializeDir)) {
      std::stringstream serialize_dir;
      serialize_dir << "--delegate_serialize_dir=" << kSerializeDir;
      args.push_back(serialize_dir.str());
    } else {
#ifdef __ANDROID__
      __android_log_print(ANDROID_LOG_ERROR, "tflite",
                          "Failed to create serialize directory %s.",
                          kSerializeDir);
#else
      fprintf(stderr, "Failed to create serialize directory %s.",
              kSerializeDir);
#endif
    }
  }
  return tag;
}

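// Assembles the argv for the given scenario and runs the TFLite benchmark with
// the Firebase-reporting and CSV-exporting listeners attached. Failures are
// also reported to Firebase.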
void RunScenario(const std::string& library_dir, int scenario, int report_fd) {
  std::vector<std::string> args;
  std::string tag = GetScenarioConfig(library_dir, scenario, args);
  std::vector<char*> argv;
  argv.reserve(args.size());
  for (auto& arg : args) {
    argv.push_back(const_cast<char*>(arg.data()));
  }

  BenchmarkTfLiteModel benchmark;
  FirebaseReportingListener firebaseReporting(tag, report_fd);
  benchmark.AddListener(&firebaseReporting);
  CsvExportingListener csvExporting(tag);
  benchmark.AddListener(&csvExporting);
  auto status = benchmark.Run(static_cast<int>(argv.size()), argv.data());
  if (status != kTfLiteOk) {
    firebaseReporting.ReportFailure(status);
  }
}

}  // namespace
}  // namespace benchmark
}  // namespace tflite

extern "C" {

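// JNI entry point invoked from the Java BenchmarkModel class: |scenario|
// selects the benchmark configuration, |report_fd| is the Firebase Game Loop
// results file descriptor, and |library_dir| is passed through as the Hexagon
// delegate library path for the DSP scenario.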
JNIEXPORT void JNICALL
Java_org_tensorflow_lite_benchmark_firebase_BenchmarkModel_nativeRun(
    JNIEnv* env, jclass clazz, jstring library_dir, jint scenario,
    jint report_fd) {
  const char* lib_dir = env->GetStringUTFChars(library_dir, nullptr);

  tflite::benchmark::RunScenario(lib_dir, static_cast<int>(scenario),
                                 static_cast<int>(report_fd));

  env->ReleaseStringUTFChars(library_dir, lib_dir);
}

}  // extern "C"