// Copyright (C) 2023 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @file virtualtemp_estimator_test.cc
 *
 * Test application to run and verify the virtualtemp estimator interface.
 */

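// Example invocations (binary name and file paths are illustrative):
//   vtestimator_test -m 0 -i "30.0 31.2 29.8 30.5" -s 3
//   vtestimator_test -m 1 -i /data/local/tmp/vt_input.json -o /data/local/tmp/vt_output.json
//   vtestimator_test -m 2 -c 1000 -d 1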
#include "virtualtemp_estimator.h"

#include <android-base/file.h>
#include <android-base/logging.h>
#include <android-base/parsedouble.h>
#include <android-base/properties.h>
#include <android-base/strings.h>
#include <cutils/properties.h>
#include <cutils/trace.h>
#include <json/reader.h>
#include <json/value.h>
#include <json/writer.h>
#include <log/log.h>
#include <malloc.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>

#include <algorithm>
#include <climits>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <string_view>
#include <vector>

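// Default on-device model/config locations and test constants.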
constexpr std::string_view kDefaultModel("/vendor/etc/vt_estimation_model.tflite");
constexpr std::string_view kConfigProperty("vendor.thermal.config");
constexpr std::string_view kConfigDefaultFileName("thermal_info_config.json");
constexpr std::string_view kTestSensorName("virtual-skin-model-test");
constexpr int kMillion = 1000000;
constexpr int kLogIntervalUsec = 10 * kMillion;

static inline unsigned long get_elapsed_time_usec(struct timeval start, struct timeval end) {
    unsigned long elapsed_time = (end.tv_sec - start.tv_sec) * kMillion;
    elapsed_time += (end.tv_usec - start.tv_usec);

    return elapsed_time;
}

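// Parses the thermal config JSON and returns the "Combination" input list for
// the VIRTUAL-SKIN-MODEL sensor; returns an empty vector on any error.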
static std::vector<std::string> get_input_combination(std::string_view thermal_config_path) {
    std::vector<std::string> result;
    std::string json_doc;
    if (!android::base::ReadFileToString(thermal_config_path.data(), &json_doc)) {
        std::cout << "Failed to read JSON config from " << thermal_config_path.data() << "\n";
        return result;
    }

    Json::Value root;
    Json::CharReaderBuilder reader_builder;
    std::unique_ptr<Json::CharReader> reader(reader_builder.newCharReader());
    std::string errorMessage;

    if (!reader->parse(json_doc.data(), json_doc.data() + json_doc.size(), &root, &errorMessage)) {
        std::cout << "Failed to parse JSON config: " << errorMessage << "\n";
        return result;
    }

    Json::Value sensors = root["Sensors"];
    if (sensors.size() == 0) {
        std::cout << "Error: sensors size is zero in thermal config\n";
        return result;
    }

    for (Json::Value::ArrayIndex i = 0; i < sensors.size(); ++i) {
        const std::string &name = sensors[i]["Name"].asString();
        if (name == "VIRTUAL-SKIN-MODEL") {
            Json::Value values = sensors[i]["Combination"];
            if (values.size() == 0) {
                return result;
            }

            std::cout << "Combination for VIRTUAL-SKIN-MODEL : [";
            for (Json::Value::ArrayIndex j = 0; j < values.size(); ++j) {
                result.push_back(values[j].asString());
                std::cout << result.back() << ", ";
            }
            std::cout << "]\n\n";
        }
    }

    return result;
}

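// Mode 2: drives the estimator with randomly generated thermistor inputs,
// flags any output above 55C, and reports inference latency statistics.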
static int run_random_input_inference(std::string_view model_path,
                                      std::string_view thermal_config_path, int min_inference_count,
                                      int inference_delay_sec, int prev_samples_order) {
    std::vector<float> output;
    unsigned long prev_log_time = 0;
    thermal::vtestimator::VtEstimatorStatus ret;
    std::vector<std::string> input_combination = get_input_combination(thermal_config_path);
    int input_size = input_combination.size();

    // Create and Initialize vtestimator
    thermal::vtestimator::VirtualTempEstimator vt_estimator_(
            kTestSensorName, thermal::vtestimator::kUseMLModel, input_size);
    ::thermal::vtestimator::VtEstimationInitData init_data(thermal::vtestimator::kUseMLModel);
    init_data.ml_model_init_data.model_path = model_path;
    init_data.ml_model_init_data.prev_samples_order = prev_samples_order;
    init_data.ml_model_init_data.use_prev_samples = (prev_samples_order > 1);

    std::cout << "Initialize estimator\n";
    ret = vt_estimator_.Initialize(init_data);
    if (ret != thermal::vtestimator::kVtEstimatorOk) {
        std::cout << "Failed to Initialize estimator (ret: " << ret << ")\n";
        return -1;
    }

    struct timeval start_loop_time;
    int inference_count = 0;
    unsigned long max_inference_time = 0, min_inference_time = ULONG_MAX;
    unsigned long sum_inference_time = 0;
    float avg_inference_time = 0;
    std::vector<unsigned long> inference_times;

    std::srand(time(NULL));
    gettimeofday(&start_loop_time, nullptr);
    do {
        struct timeval begin, end;
        std::vector<float> thermistors;

        // preparing random inputs (in mC) with starting temperature between 0C and 50C
        int r = std::rand() % 50000;
        for (int i = 0; i < input_size; ++i) {
            thermistors.push_back(r + i * 1000);
        }

        gettimeofday(&begin, nullptr);
        ret = vt_estimator_.Estimate(thermistors, &output);
        gettimeofday(&end, nullptr);
        if (ret != thermal::vtestimator::kVtEstimatorOk) {
            std::cout << "Failed to run estimator (ret: " << ret << ")\n";
            return -1;
        }

        std::cout << "inference_count: " << inference_count << " random_value (r): " << r
                  << " output: ";
        for (size_t i = 0; i < output.size(); ++i) {
            std::cout << output[i] << " ";
        }
        std::cout << "\n";

        for (size_t i = 0; i < output.size(); ++i) {
            if (output[i] > 55000) {
                std::cout << "Temperature at index " << i << " above 55C observed\n";
                return -1;
            }
        }

        unsigned long inference_time_usec = get_elapsed_time_usec(begin, end);

        inference_count++;
        max_inference_time = std::max(max_inference_time, inference_time_usec);
        min_inference_time = std::min(min_inference_time, inference_time_usec);
        sum_inference_time += inference_time_usec;
        avg_inference_time = static_cast<float>(sum_inference_time) / inference_count;
        inference_times.push_back(inference_time_usec);

        unsigned long elapsed_time = get_elapsed_time_usec(start_loop_time, end);
        if (elapsed_time - prev_log_time >= kLogIntervalUsec) {
            std::cout << "elapsed_time_sec: " << elapsed_time / kMillion
                      << " inference_count: " << inference_count
                      << " min_inference_time: " << min_inference_time
                      << " max_inference_time: " << max_inference_time
                      << " avg_inference_time: " << avg_inference_time << std::endl;
            prev_log_time = elapsed_time;
        }

        if (inference_delay_sec) {
            sleep(inference_delay_sec);
        }
    } while (inference_count < min_inference_count);

    std::cout << "\n\ntotal inference count: " << inference_count << std::endl;
    std::cout << "total inference time: " << sum_inference_time << std::endl;
    std::cout << "avg_inference_time: " << avg_inference_time << std::endl;
    std::cout << "min_inference_time: " << min_inference_time << std::endl;
    std::cout << "max_inference_time: " << max_inference_time << std::endl;

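    // Report latency percentiles from the sorted per-inference timings.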
    std::sort(inference_times.begin(), inference_times.end());
    std::cout << "\n\n";
    std::cout << "p50: " << inference_times[static_cast<size_t>(inference_count * 0.5)] << std::endl;
    std::cout << "p90: " << inference_times[static_cast<size_t>(inference_count * 0.9)] << std::endl;

    return 0;
}

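// Mode 0: parses space-separated thermistor values from the command line, pads
// them to prev_samples_order sample sets if needed, and runs the estimator.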
static int run_single_inference(std::string_view model_path, std::string_view thermal_config_path,
                                char *input, int prev_samples_order) {
    if (!input) {
        std::cout << "input is nullptr" << std::endl;
        return -1;
    }

    std::vector<std::string> input_combination = get_input_combination(thermal_config_path);
    int num_linked_sensors = input_combination.size();

    std::cout << "Parsing thermistors from input string: ";
    std::vector<float> thermistors;
    char *ip = input;
    char *saveptr;
    ip = strtok_r(ip, " ", &saveptr);
    while (ip) {
        float thermistor_value;

        if (sscanf(ip, "%f", &thermistor_value) != 1) {
            std::cout << "inputs parsing failed for token: " << ip << "\n";
            ip = strtok_r(NULL, " ", &saveptr);
            continue;
        }

        std::cout << thermistor_value << " ";
        thermistors.push_back(thermistor_value);

        ip = strtok_r(NULL, " ", &saveptr);
    }
    std::cout << std::endl;
    std::cout << "thermistors.size(): " << thermistors.size() << "\n\n";

    if (thermistors.empty() || num_linked_sensors == 0) {
        std::cout << "no valid input samples parsed\n";
        return -1;
    }

    size_t total_num_samples = num_linked_sensors * prev_samples_order;
    size_t cur_size = thermistors.size();
    int count = 0;

    // If there are not enough samples, repeat input data
    while (cur_size < total_num_samples) {
        thermistors.push_back(thermistors[count++ % num_linked_sensors]);
        cur_size++;
    }

    std::vector<float> output;
    thermal::vtestimator::VtEstimatorStatus ret;

    // Create and Initialize vtestimator
    thermal::vtestimator::VirtualTempEstimator vt_estimator_(
            kTestSensorName, thermal::vtestimator::kUseMLModel, num_linked_sensors);
    ::thermal::vtestimator::VtEstimationInitData init_data(thermal::vtestimator::kUseMLModel);
    init_data.ml_model_init_data.model_path = model_path;
    init_data.ml_model_init_data.prev_samples_order = prev_samples_order;
    init_data.ml_model_init_data.use_prev_samples = (prev_samples_order > 1);

    std::cout << "Initialize estimator\n";
    ret = vt_estimator_.Initialize(init_data);
    if (ret != thermal::vtestimator::kVtEstimatorOk) {
        std::cout << "Failed to Initialize estimator (ret: " << ret << ")\n";
        return -1;
    }

    // Run vtestimator in a loop to feed in all previous and current samples
    std::cout << "run estimator\n";
    int loop_count = 0;
    do {
        int start_index = loop_count * num_linked_sensors;
        std::vector<float>::const_iterator first = thermistors.begin() + start_index;
        std::vector<float>::const_iterator last = first + num_linked_sensors;
        std::vector<float> input_data(first, last);

        std::cout << "input_data.size(): " << input_data.size() << "\n";
        std::cout << "input_data: [";
        for (size_t i = 0; i < input_data.size(); ++i) {
            std::cout << input_data[i] << " ";
        }
        std::cout << "]\n";

        ret = vt_estimator_.Estimate(input_data, &output);
        if ((ret != thermal::vtestimator::kVtEstimatorOk) &&
            (ret != thermal::vtestimator::kVtEstimatorUnderSampling)) {
            std::cout << "Failed to run estimator (ret: " << ret << ")\n";
            return -1;
        }
        // Undersampling is only expected until prev_samples_order sample sets
        // have been fed; the final iteration must produce a valid output.
        if ((ret == thermal::vtestimator::kVtEstimatorUnderSampling) &&
            (loop_count >= prev_samples_order - 1)) {
            std::cout << "Undersampling for more than prev sample order (ret: " << ret << ")\n";
            return -1;
        }
        loop_count++;
    } while (loop_count < prev_samples_order);

    std::cout << "output: ";
    for (size_t i = 0; i < output.size(); ++i) {
        std::cout << output[i] << " ";
    }
    std::cout << std::endl;
    return 0;
}

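// Mode 1: replays testcases from a JSON input file through the estimator and
// writes each testcase's per-entry outputs back as "model_vt" in output_file.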
static int run_batch_process(std::string_view model_path, std::string_view thermal_config_path,
                             const char *input_file, const char *output_file,
                             int prev_samples_order) {
    if (!input_file || !output_file) {
        std::cout << "input and output files required for batch process\n";
        return -1;
    }

    std::cout << "get_input_combination(): ";
    std::vector<std::string> input_combination = get_input_combination(thermal_config_path);
    if (input_combination.size() == 0) {
        LOG(ERROR) << "Invalid input_combination";
        return -1;
    }

    thermal::vtestimator::VtEstimatorStatus ret;
    thermal::vtestimator::VirtualTempEstimator vt_estimator_(
            kTestSensorName, thermal::vtestimator::kUseMLModel, input_combination.size());
    ::thermal::vtestimator::VtEstimationInitData init_data(thermal::vtestimator::kUseMLModel);
    init_data.ml_model_init_data.model_path = model_path;
    init_data.ml_model_init_data.prev_samples_order = prev_samples_order;
    init_data.ml_model_init_data.use_prev_samples = (prev_samples_order > 1);

    std::cout << "Initialize estimator\n";
    ret = vt_estimator_.Initialize(init_data);
    if (ret != thermal::vtestimator::kVtEstimatorOk) {
        std::cout << "Failed to Initialize estimator (ret: " << ret << ")\n";
        return -1;
    }

    std::string json_doc;
    if (!android::base::ReadFileToString(input_file, &json_doc)) {
        LOG(ERROR) << "Failed to read JSON config from " << input_file;
        return -1;
    }
    Json::Value root;
    Json::CharReaderBuilder reader_builder;
    std::unique_ptr<Json::CharReader> reader(reader_builder.newCharReader());
    std::string errorMessage;

    if (!reader->parse(json_doc.data(), json_doc.data() + json_doc.size(), &root, &errorMessage)) {
        LOG(ERROR) << "Failed to parse JSON config: " << errorMessage;
        return -1;
    }

    std::cout << "Number of testcases " << root.size() << std::endl;

    for (auto const &testcase_name : root.getMemberNames()) {
        if (testcase_name == "Metadata") {
            continue;
        }

        Json::Value testcase = root[testcase_name];
        Json::Value model_vt_outputs;
        int loop_count = testcase[input_combination[0]].size();

        std::cout << "tc: " << testcase_name << " count: " << loop_count << std::endl;
        for (int i = 0; i < loop_count; ++i) {
            std::vector<float> model_inputs;
            std::vector<float> model_outputs;
            int num_inputs = input_combination.size();
            constexpr int kCelsius2mC = 1000;

            for (int j = 0; j < num_inputs; ++j) {
                std::string input_name = input_combination[j];
                std::string value_str = testcase[input_name][std::to_string(i)].asString();

                std::cout << "tc[" << testcase_name << "] entry[" << i << "] input[" << input_name
                          << "] value_str[" << value_str << "]\n";

                float value;
                if (!android::base::ParseFloat(value_str, &value)) {
                    std::cout << "Failed to parse value_str : " << value_str << " to float\n";
                    return -1;
                }

                model_inputs.push_back(value * kCelsius2mC);
            }

            ret = vt_estimator_.Estimate(model_inputs, &model_outputs);
            if (ret != thermal::vtestimator::kVtEstimatorOk) {
                std::cout << "Failed to run estimator (ret: " << ret << ")\n";
                return -1;
            }

            std::ostringstream model_out_string;
            for (size_t k = 0; k < model_outputs.size(); ++k) {
                model_outputs[k] /= kCelsius2mC;
                model_out_string << model_outputs[k] << " ";
            }
            model_vt_outputs[std::to_string(i)] = model_out_string.str();
        }

        testcase["model_vt"] = model_vt_outputs;
        root[testcase_name] = testcase;
        std::cout << "completed testcase_name: " << testcase_name << std::endl;
    }

    Json::StreamWriterBuilder writer_builder;
    writer_builder["indentation"] = "";
    std::unique_ptr<Json::StreamWriter> writer(writer_builder.newStreamWriter());
    std::ofstream output_stream(output_file, std::ofstream::out);
    if (!output_stream.is_open()) {
        std::cout << "Failed to open output file: " << output_file << "\n";
        return -1;
    }
    writer->write(root, &output_stream);

    return 0;
}

void print_usage() {
    std::string message = "usage: \n";
    message += "-m : input mode (";
    message += "0: single inference ";
    message += "1: json input file ";
    message += "2: generate random inputs) \n";
    message += "-p : path to model file \n";
    message += "-t : path to thermal config file \n";
    message += "-i : input samples (mode 0), path to input file (mode 1) \n";
    message += "-o : output file (mode 1) \n";
    message += "-d : delay between inferences in seconds (mode 2) \n";
    message += "-c : inference count (mode 2) \n";
    message += "-s : prev_samples_order \n";

    std::cout << message << std::endl;
}

int main(int argc, char *argv[]) {
    int c, mode = -1;
    char *input = nullptr, *output = nullptr;
    std::string model_path, thermal_config_path;
    int min_inference_count = -1;
    int inference_delay_sec = 0;
    int prev_samples_order = 1;

    while ((c = getopt(argc, argv, "hm:p:i:c:o:d:t:s:")) != -1) switch (c) {
            case 'm':
                mode = atoi(optarg);
                std::cout << "mode: " << mode << std::endl;
                break;
            case 'p':
                model_path = optarg;
                std::cout << "model_path: " << model_path << std::endl;
                break;
            case 's':
                prev_samples_order = atoi(optarg);
                std::cout << "prev_samples_order: " << prev_samples_order << std::endl;
                break;
            case 't':
                thermal_config_path = optarg;
                std::cout << "thermal_config_path: " << thermal_config_path << std::endl;
                break;
            case 'i':
                input = optarg;
                std::cout << "input: " << input << std::endl;
                break;
            case 'o':
                output = optarg;
                std::cout << "output: " << output << std::endl;
                break;
            case 'c':
                min_inference_count = atoi(optarg);
                std::cout << "min_inference_count: " << min_inference_count << std::endl;
                break;
            case 'd':
                inference_delay_sec = atoi(optarg);
                std::cout << "inference_delay_sec : " << inference_delay_sec << std::endl;
                break;
            case 'h':
                print_usage();
                return 0;
            default:
                std::cout << "unsupported option " << static_cast<char>(c) << std::endl;
                abort();
        }

    if (model_path.empty()) {
        model_path = kDefaultModel;
        std::cout << "Using default model_path: " << model_path << std::endl;
    }

    if (thermal_config_path.empty()) {
        thermal_config_path =
                "/vendor/etc/" +
                android::base::GetProperty(kConfigProperty.data(), kConfigDefaultFileName.data());
        std::cout << "Using default thermal config: " << thermal_config_path << std::endl;
    }

    int ret = -1;
    switch (mode) {
        case 0:
            ret = run_single_inference(model_path, thermal_config_path, input, prev_samples_order);
            break;
        case 1:
            ret = run_batch_process(model_path, thermal_config_path, input, output,
                                    prev_samples_order);
            break;
        case 2:
            ret = run_random_input_inference(model_path, thermal_config_path, min_inference_count,
                                             inference_delay_sec, prev_samples_order);
            break;
        default:
            std::cout << "unsupported mode" << std::endl;
            print_usage();
            break;
    }

    std::cout << "Exiting" << std::endl;
    fflush(stdout);

    return ret;
}