#pragma once

#include <cstdint>
#include <functional>
#include <vector>

#include <ATen/core/ivalue.h>
#include <ATen/core/operator_name.h>
#include <torch/csrc/jit/runtime/instruction.h>

namespace torch::jit::mobile {

using Stack = std::vector<c10::IValue>;
using DebugHandle = int64_t;

class Function;

// Bytecode and the auxiliary tables it references for one mobile function.
struct Code {
  // Bytecode instructions executed by the mobile interpreter.
  std::vector<Instruction> instructions_;
  // One debug handle per instruction, used to map back to debug information.
  std::vector<DebugHandle> debug_handles_;
  // Operator names, parallel to operators_ and operator_input_sizes_.
  std::vector<c10::OperatorName> op_names_;
  // Number of specified arguments per operator (for default-arg resolution).
  std::vector<int> operator_input_sizes_;
  // Resolved operator functions, invoked on the interpreter's stack.
  std::vector<std::function<void(Stack&)>> operators_;
  // Constant pool referenced by the bytecode.
  std::vector<c10::IValue> constants_;
  // Type table referenced by the bytecode.
  std::vector<c10::TypePtr> types_;
  // TODO: Once CALL instructions are actually exported, this can be removed.
  // We may need a two-stage importing scheme, where we first construct all
  // function objects and then append referenced function pointers. This could
  // be done in parseMethods().
  std::vector<mobile::Function*> functions_;
  size_t register_size_ = 0; // Aggregated output size.
  // `initialized` is true once operators_ has been filled with resolved
  // operators.
  bool initialized = false;
};
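
// A minimal sketch (illustrative only, not part of this header) of how an
// interpreter loop might consume these fields. `dispatchOp` is a hypothetical
// helper, and it assumes the convention that an OP instruction's X field
// indexes the parallel operator tables:
//
//   void dispatchOp(const Code& code, const Instruction& inst, Stack& stack) {
//     // op_names_[inst.X] names the operator; operators_[inst.X] is the
//     // resolved callable that pops its inputs from the stack and pushes
//     // its outputs.
//     code.operators_[inst.X](stack);
//   }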

} // namespace torch::jit::mobile