xref: /aosp_15_r20/external/pytorch/torch/csrc/jit/passes/prepack_folding.cpp (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
1 #include <stack>
2 
3 #include <torch/csrc/jit/api/module.h>
4 #include <torch/csrc/jit/passes/constant_pooling.h>
5 #include <torch/csrc/jit/passes/constant_propagation.h>
6 #include <torch/csrc/jit/passes/prepack_folding.h>
7 
8 namespace torch::jit {
9 
// Must run this pass after constant folding.
//
// Folds "prepack" ops (as selected by `is_foldable_op`) whose inputs are
// all constants: each such op is executed once here at pass time, its
// single output is registered as a new attribute on the module, and every
// use of the op's output in the graph is redirected to a GetAttr node that
// reads the attribute back. The folded ops are then removed from the graph.
//
// Args:
//   m              - module whose methods' graphs are rewritten in place.
//   is_foldable_op - predicate deciding which nodes are prepack ops to fold.
//   attr_prefix    - prefix for the generated attribute names, combined with
//                    the method name and a per-method counter to keep each
//                    attribute name unique.
//
// Raises (TORCH_CHECK) if a folded op has more than one output, or if a
// generated attribute name already exists on the module (which indicates
// the pass was run on a submodule rather than only at the top level).
void PrePackingOpsFolder(
    script::Module& m,
    const PrePackingOpsFilterFn& is_foldable_op,
    const std::string& attr_prefix) {
  for (auto& method : m.get_methods()) {
    int64_t uid = 0; // int + method name gives unique identifier
    auto graph = method.graph();
    // Worklist of blocks to scan; seeded with the top-level block and grown
    // with every sub-block (if/loop bodies) encountered along the way.
    std::stack<Block*> blocks_to_visit;
    // Deletion is deferred so we never erase a node while iterating over
    // b->nodes() below.
    std::unordered_set<Node*> nodes_to_delete;
    blocks_to_visit.push(graph->block());
    std::string attr_name_base =
        attr_prefix + "_" + method.name() + "._jit_pass_packed_weight_";
    while (!blocks_to_visit.empty()) {
      Block* b = blocks_to_visit.top();
      blocks_to_visit.pop();
      for (Node* n : b->nodes()) {
        if (is_foldable_op(n)) {
          // Evaluates the node now if possible; presumably returns nullopt
          // when not all inputs are constant, in which case the op is left
          // untouched. (Declared in constant_propagation.h.)
          auto optional_outputs = runNodeIfInputsAreConstant(n);
          if (optional_outputs) {
            auto outputs = optional_outputs.value();
            TORCH_CHECK(outputs.size() == 1, "Prepack ops have single output");
            // uid++ makes each attribute name within this method unique.
            auto attr_name = attr_name_base + std::to_string(uid++);
            TORCH_CHECK(
                !(m.type()->findAttributeSlot(attr_name)),
                "Attribute name ",
                attr_name,
                " already exist in",
                " module of type:",
                m.type()->name()->qualifiedName(),
                ". Please make sure that",
                " FoldPrePackingOps is run at the top level module only.");
            // Stash the precomputed (packed) value on the module itself.
            m.register_attribute(attr_name, n->output(0)->type(), outputs[0]);
            Value* prepack_op_value = n->output(0);
            // Insert the GetAttr right where the folded op currently sits so
            // all existing uses remain dominated by the replacement value.
            WithInsertPoint ins(prepack_op_value->node());
            // graph->inputs()[0] is taken to be the module's `self` input —
            // NOTE(review): confirm against the surrounding pass framework.
            Value* packed_weight_attr =
                graph->insertGetAttr(graph->inputs()[0], attr_name)
                    ->setType(n->output(0)->type());
            prepack_op_value->replaceAllUsesWith(packed_weight_attr);
            nodes_to_delete.insert(n);
          }
        }
        // Queue nested blocks so ops inside control flow are folded too.
        for (Block* subblock : n->blocks()) {
          blocks_to_visit.push(subblock);
        }
      }
    }
    // Drop the inputs of every folded node before destroying any of them,
    // so inter-node use counts are cleared regardless of destruction order.
    for (auto n : nodes_to_delete) {
      n->removeAllInputs();
    }
    for (auto n : nodes_to_delete) {
      n->destroy();
    }
  }
}
65 
66 } // namespace torch::jit
67