xref: /aosp_15_r20/external/pytorch/torch/csrc/jit/jit_log.h (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
1 #pragma once
2 #include <torch/csrc/Export.h>
3 #include <memory>
4 #include <ostream>
5 #include <string>
6 #include <unordered_map>
7 
// `TorchScript` offers a simple logging facility that can be enabled by setting
// an environment variable `PYTORCH_JIT_LOG_LEVEL`.
10 
11 // Logging is enabled on a per file basis. To enable logging in
12 // `dead_code_elimination.cpp`, `PYTORCH_JIT_LOG_LEVEL` should be
13 // set to `dead_code_elimination.cpp` or, simply, to `dead_code_elimination`
14 // (i.e. `PYTORCH_JIT_LOG_LEVEL=dead_code_elimination`).
15 
16 // Multiple files can be logged by separating each file name with a colon `:` as
17 // in the following example,
18 // `PYTORCH_JIT_LOG_LEVEL=dead_code_elimination:guard_elimination`
19 
20 // There are 3 logging levels available for your use ordered by the detail level
21 // from lowest to highest.
22 
23 // * `GRAPH_DUMP` should be used for printing entire graphs after optimization
24 // passes
25 // * `GRAPH_UPDATE` should be used for reporting graph transformations (i.e.
26 // node deletion, constant folding, etc)
27 // * `GRAPH_DEBUG` should be used for providing information useful for debugging
28 //   the internals of a particular optimization pass or analysis
29 
30 // The default logging level is `GRAPH_DUMP` meaning that only `GRAPH_DUMP`
31 // statements will be enabled when one specifies a file(s) in
32 // `PYTORCH_JIT_LOG_LEVEL`.
33 
// `GRAPH_UPDATE` can be enabled by prefixing a file name with a `>` as in
// `>alias_analysis`.
// `GRAPH_DEBUG` can be enabled by prefixing a file name with a `>>` as in
// `>>alias_analysis`.
38 // `>>>` is also valid and **currently** is equivalent to `GRAPH_DEBUG` as there
39 // is no logging level that is higher than `GRAPH_DEBUG`.
40 
41 namespace torch::jit {
42 
43 struct Node;
44 struct Graph;
45 
46 enum class JitLoggingLevels {
47   GRAPH_DUMP = 0,
48   GRAPH_UPDATE,
49   GRAPH_DEBUG,
50 };
51 
52 TORCH_API std::string get_jit_logging_levels();
53 
54 TORCH_API void set_jit_logging_levels(std::string level);
55 
56 TORCH_API void set_jit_logging_output_stream(std::ostream& out_stream);
57 
58 TORCH_API std::ostream& get_jit_logging_output_stream();
59 
60 TORCH_API std::string getHeader(const Node* node);
61 
62 TORCH_API std::string log_function(const std::shared_ptr<Graph>& graph);
63 
64 TORCH_API ::torch::jit::JitLoggingLevels jit_log_level();
65 
66 // Prefix every line in a multiline string \p IN_STR with \p PREFIX.
67 TORCH_API std::string jit_log_prefix(
68     const std::string& prefix,
69     const std::string& in_str);
70 
71 TORCH_API std::string jit_log_prefix(
72     ::torch::jit::JitLoggingLevels level,
73     const char* fn,
74     int l,
75     const std::string& in_str);
76 
77 TORCH_API bool is_enabled(
78     const char* cfname,
79     ::torch::jit::JitLoggingLevels level);
80 
81 TORCH_API std::ostream& operator<<(
82     std::ostream& out,
83     ::torch::jit::JitLoggingLevels level);
84 
85 #define JIT_LOG(level, ...)                                         \
86   if (is_enabled(__FILE__, level)) {                                \
87     ::torch::jit::get_jit_logging_output_stream()                   \
88         << ::torch::jit::jit_log_prefix(                            \
89                level, __FILE__, __LINE__, ::c10::str(__VA_ARGS__)); \
90   }
91 
92 // tries to reconstruct original python source
93 #define SOURCE_DUMP(MSG, G)                       \
94   JIT_LOG(                                        \
95       ::torch::jit::JitLoggingLevels::GRAPH_DUMP, \
96       MSG,                                        \
97       "\n",                                       \
98       ::torch::jit::log_function(G));
99 // use GRAPH_DUMP for dumping graphs after optimization passes
100 #define GRAPH_DUMP(MSG, G) \
101   JIT_LOG(                 \
102       ::torch::jit::JitLoggingLevels::GRAPH_DUMP, MSG, "\n", (G)->toString());
103 // use GRAPH_UPDATE for reporting graph transformations (i.e. node deletion,
104 // constant folding, CSE)
105 #define GRAPH_UPDATE(...) \
106   JIT_LOG(::torch::jit::JitLoggingLevels::GRAPH_UPDATE, __VA_ARGS__);
107 // use GRAPH_DEBUG to provide information useful for debugging a particular opt
108 // pass
109 #define GRAPH_DEBUG(...) \
110   JIT_LOG(::torch::jit::JitLoggingLevels::GRAPH_DEBUG, __VA_ARGS__);
111 // use GRAPH_EXPORT to export a graph so that the IR can be loaded by a script
112 #define GRAPH_EXPORT(MSG, G)                       \
113   JIT_LOG(                                         \
114       ::torch::jit::JitLoggingLevels::GRAPH_DEBUG, \
115       MSG,                                         \
116       "\n<GRAPH_EXPORT>\n",                        \
117       (G)->toString(),                             \
118       "</GRAPH_EXPORT>");
119 
120 #define GRAPH_DUMP_ENABLED \
121   (is_enabled(__FILE__, ::torch::jit::JitLoggingLevels::GRAPH_DUMP))
122 #define GRAPH_UPDATE_ENABLED \
123   (is_enabled(__FILE__, ::torch::jit::JitLoggingLevels::GRAPH_UPDATE))
124 #define GRAPH_DEBUG_ENABLED \
125   (is_enabled(__FILE__, ::torch::jit::JitLoggingLevels::GRAPH_DEBUG))
126 } // namespace torch::jit
127