//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "../ImageTensorGenerator/ImageTensorGenerator.hpp"
#include "../InferenceTest.hpp"
#include "ModelAccuracyChecker.hpp"
#include "armnnDeserializer/IDeserializer.hpp"

#include <armnnUtils/Filesystem.hpp>
#include <armnnUtils/TContainer.hpp>

#include <cxxopts/cxxopts.hpp>
#include <map>

using namespace armnn::test;

/** Load image names and ground-truth labels from the image directory and the ground-truth label file
 *
 * @pre \p validationLabelPath exists and is a valid regular file
 * @pre \p imageDirectoryPath exists and is a valid directory
 * @pre the labels in the validation file correspond to the images sorted lexicographically by file name
 * @pre image indices start at 1
 * @pre \p begIndex and \p endIndex are end-inclusive
 *
 * @param[in] validationLabelPath Path to the validation label file
 * @param[in] imageDirectoryPath  Path to the directory containing the validation images
 * @param[in] begIndex            Begin index of the images to be loaded. Inclusive
 * @param[in] endIndex            End index of the images to be loaded. Inclusive
 * @param[in] excludelistPath     Path to the excludelist file
 * @return A map from image file names to their corresponding ground-truth labels
 */
map<std::string, std::string> LoadValidationImageFilenamesAndLabels(const string& validationLabelPath,
                                                                    const string& imageDirectoryPath,
                                                                    size_t begIndex             = 0,
                                                                    size_t endIndex             = 0,
                                                                    const string& excludelistPath = "");
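// Purely illustrative example (the file and directory names below are placeholders): load the ground-truth
// labels for validation images 1 to 100, skipping any indices listed in an excludelist file:
//   const auto imageNameToLabel = LoadValidationImageFilenamesAndLabels(
//       "val_labels.txt", "validation_images/", 1, 100, "excludelist.txt");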

/** Load model output labels from file
 *
 * @pre \p modelOutputLabelsPath exists and is a regular file
 *
 * @param[in] modelOutputLabelsPath Path to the model output labels file
 * @return A vector of labels, each of which is described by a list of category names
 */
std::vector<armnnUtils::LabelCategoryNames> LoadModelOutputLabels(const std::string& modelOutputLabelsPath);
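// As parsed in LoadModelOutputLabels below, each line of the labels file is expected to look roughly like
//   "<class index>: <category name>, <alternative category name>, ..."
// where only the comma-separated names after the last ':' are used (stripped of surrounding whitespace).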

int main(int argc, char* argv[])
{
    try
    {
        armnn::LogSeverity level = armnn::LogSeverity::Debug;
        armnn::ConfigureLogging(true, true, level);

        std::string modelPath;
        std::string modelFormat;
        std::vector<std::string> inputNames;
        std::vector<std::string> outputNames;
        std::string dataDir;
        std::string modelOutputLabelsPath;
        std::string validationLabelPath;
        std::string inputLayout;
        std::vector<armnn::BackendId> computeDevice;
        std::string validationRange;
        std::string excludelistPath;

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                            + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        try
        {
            cxxopts::Options options("ModelAccuracyTool-Armnn", "Options");

            options.add_options()
                ("h,help", "Display help messages")
                ("m,model-path",
                    "Path to armnn format model file",
                    cxxopts::value<std::string>(modelPath))
                ("f,model-format",
                    "The model format. Supported values: tflite",
                    cxxopts::value<std::string>(modelFormat))
                ("i,input-name",
                    "Identifiers of the input tensors in the network, separated by commas with no spaces.",
                    cxxopts::value<std::vector<std::string>>(inputNames))
                ("o,output-name",
                    "Identifiers of the output tensors in the network, separated by commas with no spaces.",
                    cxxopts::value<std::vector<std::string>>(outputNames))
                ("d,data-dir",
                    "Path to directory containing the ImageNet test data",
                    cxxopts::value<std::string>(dataDir))
                ("p,model-output-labels",
                    "Path to model output labels file.",
                    cxxopts::value<std::string>(modelOutputLabelsPath))
                ("v,validation-labels-path",
                    "Path to ImageNet validation label file",
                    cxxopts::value<std::string>(validationLabelPath))
                ("l,data-layout",
                    "Data layout. Supported values: NHWC, NCHW. Default: NHWC",
                    cxxopts::value<std::string>(inputLayout)->default_value("NHWC"))
                ("c,compute",
                    backendsMessage.c_str(),
                    cxxopts::value<std::vector<armnn::BackendId>>(computeDevice)->default_value("CpuAcc,CpuRef"))
                ("r,validation-range",
                    "The range of the images to be evaluated, specified in the form <begin index>:<end index>. "
                    "The index starts at 1 and the range is inclusive. "
                    "By default the evaluation is performed on all images.",
                    cxxopts::value<std::string>(validationRange)->default_value("1:0"))
                ("e,excludelist-path",
                    "Path to an excludelist file where each line denotes the index of an image to be "
                    "excluded from evaluation.",
                    cxxopts::value<std::string>(excludelistPath)->default_value(""));
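            // Illustrative invocation (the file names and paths below are placeholders, not real artefacts):
            //   ModelAccuracyTool-Armnn -m model.armnn -f tflite -i input -o output \
            //       -d /path/to/imagenet-val -p model_output_labels.txt -v validation_labels.txt -c CpuAcc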

            auto result = options.parse(argc, argv);

            if (result.count("help") > 0)
            {
                std::cout << options.help() << std::endl;
                return EXIT_FAILURE;
            }

            // Check for mandatory single options.
            std::string mandatorySingleParameters[] = { "model-path", "model-format", "input-name", "output-name",
                                                        "data-dir", "model-output-labels", "validation-labels-path" };
            for (const auto& param : mandatorySingleParameters)
            {
                if (result.count(param) != 1)
                {
                    std::cerr << "Parameter '--" << param << "' is required but missing." << std::endl;
                    return EXIT_FAILURE;
                }
            }
        }
        catch (const cxxopts::OptionException& e)
        {
            std::cerr << e.what() << std::endl << std::endl;
            return EXIT_FAILURE;
        }
        catch (const std::exception& e)
        {
            ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
            std::cerr << "Fatal internal error: " << e.what() << std::endl;
            return EXIT_FAILURE;
        }

        // Check if the requested backends are all valid
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
        {
            ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
                             << invalidBackends;
            return EXIT_FAILURE;
        }
        armnn::Status status;

        // Create runtime
        armnn::IRuntime::CreationOptions options;
        armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
        std::ifstream file(modelPath);

        // Create parser
        using IParser = armnnDeserializer::IDeserializer;
        auto armnnparser(IParser::Create());

        // Create a network
        armnn::INetworkPtr network = armnnparser->CreateNetworkFromBinary(file);

        // Optimise the network
        armnn::IOptimizedNetworkPtr optimizedNet(nullptr, nullptr);
        try
        {
            optimizedNet = armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
        }
        catch (const armnn::Exception& e)
        {
            std::stringstream message;
            message << "armnn::Exception (" << e.what() << ") caught from optimize.";
            ARMNN_LOG(fatal) << message.str();
            return EXIT_FAILURE;
        }

        // Load the network into the runtime
        armnn::NetworkId networkId;
        status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
        if (status == armnn::Status::Failure)
        {
            ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
            return EXIT_FAILURE;
        }

        // Set up the network
        using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

        // Handle inputNames and outputNames; there can be multiple of each.
        std::vector<BindingPointInfo> inputBindings;
        for (const auto& input : inputNames)
        {
            const armnnDeserializer::BindingPointInfo&
                    inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, input);

            std::pair<armnn::LayerBindingId, armnn::TensorInfo>
                    inputBinding(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
            inputBindings.push_back(inputBinding);
        }

        std::vector<BindingPointInfo> outputBindings;
        for (const auto& output : outputNames)
        {
            const armnnDeserializer::BindingPointInfo&
                    outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, output);

            std::pair<armnn::LayerBindingId, armnn::TensorInfo>
                    outputBinding(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
            outputBindings.push_back(outputBinding);
        }

        // Load model output labels
        if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
            !fs::is_regular_file(modelOutputLabelsPath))
        {
            ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
            return EXIT_FAILURE;
        }
        const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
            LoadModelOutputLabels(modelOutputLabelsPath);

        // Parse the begin and end image indices
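        // e.g. "101:200" evaluates images 101 to 200 inclusive; the default "1:0" leaves the end index
        // at 0, which LoadValidationImageFilenamesAndLabels treats as "up to the last image".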
        std::vector<std::string> imageIndexStrs = armnnUtils::SplitBy(validationRange, ":");
        size_t imageBegIndex;
        size_t imageEndIndex;
        if (imageIndexStrs.size() != 2)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: invalid format " << validationRange;
            return EXIT_FAILURE;
        }
        try
        {
            imageBegIndex = std::stoul(imageIndexStrs[0]);
            imageEndIndex = std::stoul(imageIndexStrs[1]);
        }
        catch (const std::exception& e)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
            return EXIT_FAILURE;
        }

        // Validate the excludelist file if one is specified
        if (!excludelistPath.empty() &&
            !(fs::exists(excludelistPath) && fs::is_regular_file(excludelistPath)))
        {
            ARMNN_LOG(fatal) << "Invalid path to excludelist file at " << excludelistPath;
            return EXIT_FAILURE;
        }

        fs::path pathToDataDir(dataDir);
        const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
            validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, excludelistPath);
        armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);

        if (ValidateDirectory(dataDir))
        {
            InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;

            params.m_ModelPath      = modelPath;
            params.m_IsModelBinary  = true;
            params.m_ComputeDevices = computeDevice;
            // Insert inputNames and outputNames into params vector
            params.m_InputBindings.insert(std::end(params.m_InputBindings),
                                          std::begin(inputNames),
                                          std::end(inputNames));
            params.m_OutputBindings.insert(std::end(params.m_OutputBindings),
                                           std::begin(outputNames),
                                           std::end(outputNames));

            using TParser = armnnDeserializer::IDeserializer;
            // If dynamicBackends is empty it will be disabled by default.
            InferenceModel<TParser, float> model(params, false, "");

            // Get input tensor information
            const armnn::TensorInfo& inputTensorInfo   = model.GetInputBindingInfo().second;
            const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
            const armnn::DataType& inputTensorDataType = inputTensorInfo.GetDataType();
            armnn::DataLayout inputTensorDataLayout;
            if (inputLayout == "NCHW")
            {
                inputTensorDataLayout = armnn::DataLayout::NCHW;
            }
            else if (inputLayout == "NHWC")
            {
                inputTensorDataLayout = armnn::DataLayout::NHWC;
            }
            else
            {
                ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
                return EXIT_FAILURE;
            }
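            // NCHW tensors are laid out as [N, C, H, W] and NHWC tensors as [N, H, W, C],
            // hence the different shape indices used below to read the input width and height.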
            const unsigned int inputTensorWidth =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
            const unsigned int inputTensorHeight =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[2] : inputTensorShape[1];
            // Get output tensor info
            const unsigned int outputNumElements = model.GetOutputSize();
            // Check that the output tensor shape is valid
            if (modelOutputLabels.size() != outputNumElements)
            {
                ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
                                 << " does not match the number of output labels: " << modelOutputLabels.size();
                return EXIT_FAILURE;
            }

            const unsigned int batchSize = 1;
            // Get normalisation parameters
            SupportedFrontend modelFrontend;
            if (modelFormat == "tflite")
            {
                modelFrontend = SupportedFrontend::TFLite;
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
                return EXIT_FAILURE;
            }
            const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
            for (const auto& imageEntry : imageNameToLabel)
            {
                const std::string imageName = imageEntry.first;
                std::cout << "Processing image: " << imageName << "\n";

                vector<armnnUtils::TContainer> inputDataContainers;
                vector<armnnUtils::TContainer> outputDataContainers;

                auto imagePath = pathToDataDir / fs::path(imageName);
                switch (inputTensorDataType)
                {
                    case armnn::DataType::Signed32:
                        inputDataContainers.push_back(
                            PrepareImageTensor<int>(imagePath.string(),
                                                    inputTensorWidth, inputTensorHeight,
                                                    normParams,
                                                    batchSize,
                                                    inputTensorDataLayout));
                        outputDataContainers = { vector<int>(outputNumElements) };
                        break;
                    case armnn::DataType::QAsymmU8:
                        inputDataContainers.push_back(
                            PrepareImageTensor<uint8_t>(imagePath.string(),
                                                        inputTensorWidth, inputTensorHeight,
                                                        normParams,
                                                        batchSize,
                                                        inputTensorDataLayout));
                        outputDataContainers = { vector<uint8_t>(outputNumElements) };
                        break;
                    case armnn::DataType::Float32:
                    default:
                        inputDataContainers.push_back(
                            PrepareImageTensor<float>(imagePath.string(),
                                                      inputTensorWidth, inputTensorHeight,
                                                      normParams,
                                                      batchSize,
                                                      inputTensorDataLayout));
                        outputDataContainers = { vector<float>(outputNumElements) };
                        break;
                }

                status = runtime->EnqueueWorkload(networkId,
                                                  armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
                                                  armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));

                if (status == armnn::Status::Failure)
                {
                    ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
                }

                checker.AddImageResult<armnnUtils::TContainer>(imageName, outputDataContainers);
            }
        }
        else
        {
            return EXIT_SUCCESS;
        }

        for (unsigned int i = 1; i <= 5; ++i)
        {
            std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
        }

        ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
        return EXIT_SUCCESS;
    }
    catch (const armnn::Exception& e)
    {
        // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
        // exception of type std::length_error.
        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
        std::cerr << "Armnn Error: " << e.what() << std::endl;
        return EXIT_FAILURE;
    }
    catch (const std::exception& e)
    {
        // Coverity fix: various boost exceptions can be thrown by methods called by this test.
        std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
                     "Accuracy Tool: " << e.what() << std::endl;
        return EXIT_FAILURE;
    }
}

map<std::string, std::string> LoadValidationImageFilenamesAndLabels(const string& validationLabelPath,
                                                                    const string& imageDirectoryPath,
                                                                    size_t begIndex,
                                                                    size_t endIndex,
                                                                    const string& excludelistPath)
{
    // Populate imageFilenames with the names of all .JPEG and .PNG images
    std::vector<std::string> imageFilenames;
    for (const auto& imageEntry : fs::directory_iterator(fs::path(imageDirectoryPath)))
    {
        fs::path imagePath = imageEntry.path();

        // Get the extension and convert it to uppercase
        std::string imageExtension = imagePath.extension().string();
        std::transform(imageExtension.begin(), imageExtension.end(), imageExtension.begin(), ::toupper);

        if (fs::is_regular_file(imagePath) && (imageExtension == ".JPEG" || imageExtension == ".PNG"))
        {
            imageFilenames.push_back(imagePath.filename().string());
        }
    }
    if (imageFilenames.empty())
    {
        throw armnn::Exception("No image file (JPEG, PNG) found at " + imageDirectoryPath);
    }

    // Sort the image filenames lexicographically
    std::sort(imageFilenames.begin(), imageFilenames.end());

    std::cout << imageFilenames.size() << " images found at " << imageDirectoryPath << std::endl;

    // Validate the requested range and apply the default end index (0 means the last image)
    if (begIndex < 1 || endIndex > imageFilenames.size())
    {
        throw armnn::Exception("Invalid image index range");
    }
    endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
    if (begIndex > endIndex)
    {
        throw armnn::Exception("Invalid image index range");
    }

    // Load the excludelist if there is one
    std::vector<unsigned int> excludelist;
    if (!excludelistPath.empty())
    {
        std::ifstream excludelistFile(excludelistPath);
        unsigned int index;
        while (excludelistFile >> index)
        {
            excludelist.push_back(index);
        }
    }
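    // Note: the index matching below assumes the excludelist entries are listed in ascending order,
    // matching the order in which the ground-truth labels are read.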

    // Load the ground-truth labels and pair them with their corresponding image names
    std::string classification;
    map<std::string, std::string> imageNameToLabel;
    ifstream infile(validationLabelPath);
    size_t imageIndex            = begIndex;
    size_t excludelistIndexCount = 0;
    while (std::getline(infile, classification))
    {
        if (imageIndex > endIndex)
        {
            break;
        }
        // If the current imageIndex is in the excludelist, skip the current image
        if (excludelistIndexCount < excludelist.size() && imageIndex == excludelist[excludelistIndexCount])
        {
            ++imageIndex;
            ++excludelistIndexCount;
            continue;
        }
        imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
        ++imageIndex;
    }
    std::cout << excludelistIndexCount << " images in excludelist" << std::endl;
    std::cout << imageIndex - begIndex - excludelistIndexCount << " images to be loaded" << std::endl;
    return imageNameToLabel;
}

std::vector<armnnUtils::LabelCategoryNames> LoadModelOutputLabels(const std::string& modelOutputLabelsPath)
{
    std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
    ifstream modelOutputLabelsFile(modelOutputLabelsPath);
    std::string line;
    while (std::getline(modelOutputLabelsFile, line))
    {
        armnnUtils::LabelCategoryNames tokens                  = armnnUtils::SplitBy(line, ":");
        armnnUtils::LabelCategoryNames predictionCategoryNames = armnnUtils::SplitBy(tokens.back(), ",");
        std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(), predictionCategoryNames.begin(),
                       [](const std::string& category) { return armnnUtils::Strip(category); });
        modelOutputLabels.push_back(predictionCategoryNames);
    }
    return modelOutputLabels;
}