// aten/src/ATen/native/xnnpack/Init.cpp
#ifdef USE_XNNPACK

#include <ATen/native/xnnpack/Common.h>
#include <c10/util/Exception.h>

namespace at::native::xnnpack {
namespace internal {
namespace {

// Tracks whether the XNNPACK library has been successfully initialized.
bool is_initialized_ = false;
// Lazily initializes the XNNPACK library. Safe to call repeatedly: a failed
// attempt is not cached, so the next call retries xnn_initialize().
bool initialize() {
  using namespace internal;

  // This implementation allows for retries.
  if (!is_initialized_) {
    const xnn_status status = xnn_initialize(nullptr);
    is_initialized_ = (xnn_status_success == status);

    if (!is_initialized_) {
      if (xnn_status_out_of_memory == status) {
        TORCH_WARN_ONCE("Failed to initialize XNNPACK! Reason: Out of memory.");
      } else if (xnn_status_unsupported_hardware == status) {
        TORCH_WARN_ONCE("Failed to initialize XNNPACK! Reason: Unsupported hardware.");
      } else {
        TORCH_WARN_ONCE("Failed to initialize XNNPACK! Reason: Unknown error!");
      }
    }
  }

  return is_initialized_;
}

// Deinitializes the XNNPACK library. Like initialize(), a failed attempt
// can be retried on a subsequent call.
bool C10_UNUSED deinitialize() {
  using namespace internal;

  // This implementation allows for retries.
  if (is_initialized_) {
    const xnn_status status = xnn_deinitialize();
    is_initialized_ = !(xnn_status_success == status);

    if (is_initialized_) {
      TORCH_WARN_ONCE("Failed to uninitialize XNNPACK! Reason: Unknown error!");
    }
  }

  return !is_initialized_;
}

} // namespace
} // namespace internal

bool available() {
  // Add extra conditions here that should disable mobile CPU impl at runtime in its totality.
  return internal::initialize();
}
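
// Illustrative caller-side usage (a sketch, not part of this file): XNNPACK-backed
// kernels are expected to gate on available() before choosing the XNNPACK code
// path, for example:
//
//   if (at::native::xnnpack::available() /* && other op-specific checks */) {
//     // dispatch to the XNNPACK implementation
//   }
//
// The surrounding call site and the extra checks above are assumptions for
// illustration only.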

} // namespace at::native::xnnpack

#endif /* USE_XNNPACK */