#pragma once

#include <torch/custom_class.h>

namespace torch {
namespace jit {

// Interface for a JIT backend. See the illustrative sketch after the class
// definition for how a backend might implement it.
class TORCH_API PyTorchBackendInterface : public torch::CustomClassHolder {
 public:
  PyTorchBackendInterface() noexcept;
  ~PyTorchBackendInterface() override;

  // Returns true if the backend is available to process delegation calls.
  virtual bool is_available() = 0;

  // Compile the module contained in \p processed using the details provided in
  // \p method_compile_spec for each module method that should be compiled for
  // the backend. \p method_compile_spec should be of type Dict<string, Any>.
  // \returns a dictionary of type Dict<string, Any> that contains a backend
  // handle for each method that can run on the backend (i.e. each key in \p
  // method_compile_spec).
  virtual c10::impl::GenericDict compile(
      c10::IValue processed,
      c10::impl::GenericDict method_compile_spec) = 0;

  // Execute the method specified by \p handle using \p inputs. \returns the
  // outputs as a tuple.
  virtual c10::impl::GenericList execute(
      c10::IValue handle,
      c10::impl::GenericList inputs) = 0;
};
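
// Illustrative sketch (an assumption for documentation purposes, not part of
// this interface): a backend is expected to subclass PyTorchBackendInterface,
// override the three pure virtual methods above, and then register itself
// (e.g. via the helpers in torch/csrc/jit/backends/backend.h). The class name
// and the trivial handle/echo logic below are hypothetical.
//
//   class MinimalBackend : public PyTorchBackendInterface {
//    public:
//     bool is_available() override {
//       return true;
//     }
//
//     // Map every method named in method_compile_spec to an opaque handle;
//     // here the handle is simply the method name itself.
//     c10::impl::GenericDict compile(
//         c10::IValue processed,
//         c10::impl::GenericDict method_compile_spec) override {
//       auto handles = c10::impl::GenericDict(
//           c10::StringType::get(), c10::AnyType::get());
//       for (const auto& entry : method_compile_spec) {
//         handles.insert(entry.key(), entry.key());
//       }
//       return handles;
//     }
//
//     // "Run" a method by echoing its inputs back as the outputs.
//     c10::impl::GenericList execute(
//         c10::IValue handle,
//         c10::impl::GenericList inputs) override {
//       c10::impl::GenericList outputs(c10::AnyType::get());
//       for (size_t i = 0; i < inputs.size(); ++i) {
//         outputs.push_back(inputs.get(i));
//       }
//       return outputs;
//     }
//   };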
} // namespace jit
} // namespace torch