/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h"

using llvm::cl::opt;

// TODO(jpienaar): Revise the command line option parsing here.
// NOLINTNEXTLINE
opt<std::string> input_file_name(llvm::cl::Positional,
                                 llvm::cl::desc("<input file>"),
                                 llvm::cl::init("-"));

// NOLINTNEXTLINE
opt<bool> import_saved_model_object_graph(
    "savedmodel-objectgraph-to-mlir",
    llvm::cl::desc("Import a saved model to its MLIR representation"),
    llvm::cl::value_desc("dir"));

// NOLINTNEXTLINE
opt<bool> import_saved_model_signature_defs(
    "savedmodel-signaturedefs-to-mlir",
    llvm::cl::desc("Import a saved model V1 to its MLIR representation"),
    llvm::cl::value_desc("dir"));

// NOLINTNEXTLINE
opt<std::string> saved_model_tags(
    "tf-savedmodel-tags",
    llvm::cl::desc("Tags used to indicate which MetaGraphDef to import, "
                   "separated by ','"),
    llvm::cl::init("serve"));

// NOLINTNEXTLINE
opt<std::string> saved_model_exported_names(
    "tf-savedmodel-exported-names",
    llvm::cl::desc("Names to export from SavedModel, separated by ','. Empty "
                   "(the default) means export all."),
    llvm::cl::init(""));

// NOLINTNEXTLINE
opt<std::string> output_file_name("o", llvm::cl::desc("<output file>"),
                                  llvm::cl::value_desc("filename"),
                                  llvm::cl::init("-"));
// NOLINTNEXTLINE
opt<bool> use_splatted_constant(
    "use-splatted-constant",
    llvm::cl::desc(
        "Replace constants with randomly generated splatted tensors"),
    llvm::cl::init(false), llvm::cl::Hidden);
// NOLINTNEXTLINE
opt<bool> input_mlir(
    "input-mlir",
    llvm::cl::desc("Take input TensorFlow model in textual MLIR instead of "
                   "GraphDef format"),
    llvm::cl::init(false), llvm::cl::Hidden);
// NOLINTNEXTLINE
opt<bool> output_mlir(
    "output-mlir",
    llvm::cl::desc(
        "Output MLIR rather than FlatBuffer for the generated TFLite model"),
    llvm::cl::init(false));
// NOLINTNEXTLINE
opt<bool> allow_all_select_tf_ops(
    "allow-all-select-tf-ops",
    llvm::cl::desc("Allow automatic pass through of TF ops (outside the flex "
                   "allowlist) as select TensorFlow ops"),
    llvm::cl::init(false));

// The following approach allows injecting opdefs, in addition to those that
// are already part of the global TF registry, to be linked in prior to
// importing the graph. The primary goal is to support custom ops. This is not
// intended as a general solution for custom ops going forward, but mainly as
// a way to support older models like mobilenet_ssd. More appropriate
// mechanisms, such as op hints or using functions to represent composable ops
// (see https://github.com/tensorflow/community/pull/113), should be encouraged
// instead. An illustrative example follows the flag definition below.
// NOLINTNEXTLINE
llvm::cl::list<std::string> custom_opdefs(
    "tf-custom-opdefs", llvm::cl::desc("List of custom opdefs when importing "
                                       "graphdef"));
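
// Illustrative usage sketch: the op name and signature below are hypothetical,
// and the text-proto OpDef form is an assumption about how this flag is
// consumed, not something guaranteed by this file.
//   --tf-custom-opdefs="name: 'MyCustomOp' input_arg: { name: 'x' type: DT_FLOAT }
//                       output_arg: { name: 'y' type: DT_FLOAT }"
// Each list entry is meant to describe one custom op so the importer can
// resolve it while reading the GraphDef.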

// A pair of Quantize and Dequantize ops can optionally be emitted before and
// after the quantized model, acting as adaptors so that the model can receive
// and produce floating-point data. Set this to `false` if the model input is
// of integer type (see the sketch below).
// NOLINTNEXTLINE
opt<bool> emit_quant_adaptor_ops(
    "emit-quant-adaptor-ops",
    llvm::cl::desc(
        "Emit Quantize/Dequantize before and after the generated TFLite model"),
    llvm::cl::init(false));
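
// Sketch of the effect, as described in the comment above:
//   with adaptor ops:    float input -> Quantize -> quantized model
//                        -> Dequantize -> float output
//   without adaptor ops: the caller feeds and receives integer tensors
//                        directly.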

// The path to a quantization stats file that specifies value ranges for some
// tensors with known names.
// NOLINTNEXTLINE
opt<std::string> quant_stats_file_name("quant-stats",
                                       llvm::cl::desc("<stats file>"),
                                       llvm::cl::value_desc("filename"),
                                       llvm::cl::init(""));

// A comma-separated list of TF operators that were created by the user. This
// must be used together with `-emit-select-tf-ops=true` (see the example
// invocation below).
// NOLINTNEXTLINE
opt<std::string> select_user_tf_ops(
    "select-user-tf-ops",
    llvm::cl::desc(
        "<list of custom tf ops created by the user (comma separated)>"),
    llvm::cl::init(""));
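
// Illustrative invocation (the tool name, file names, and op names are
// hypothetical; the flags are defined in this file or referenced in the
// comment above):
//   tf_tfl_translate --emit-select-tf-ops=true \
//       --select-user-tf-ops=MyOpA,MyOpB graph.pb -o model.tflite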

// NOLINTNEXTLINE
opt<bool> unfold_batchmatmul(
    "unfold_batchmatmul",
    llvm::cl::desc(
        "Whether to unfold TF BatchMatMul to a set of TFL FullyConnected ops."),
    llvm::cl::init(true));

// NOLINTNEXTLINE
opt<bool> unfold_large_splat_constant(
    "unfold-large-splat-constant",
    llvm::cl::desc("Whether to unfold large splat constant tensors to reduce "
                   "the generated model size."),
    llvm::cl::init(false));

// NOLINTNEXTLINE
opt<bool> guarantee_all_funcs_one_use(
    "guarantee-all-funcs-one-use",
    llvm::cl::desc(
        "Whether to clone functions to ensure each function has a single use."),
    llvm::cl::init(false));

// NOLINTNEXTLINE
opt<bool> enable_dynamic_update_slice(
    "enable-dynamic-update-slice",
    llvm::cl::desc("Whether to enable dynamic update slice op to convert "
                   "TensorListSetItem op."),
    llvm::cl::init(false));

// NOLINTNEXTLINE
opt<bool> import_hlo("import-hlo",
                     llvm::cl::desc("Whether the input file is an HLO file."),
                     llvm::cl::init(false));

// NOLINTNEXTLINE
opt<HloImportType> hlo_import_type(
    "hlo-import-type", llvm::cl::desc("The file type of the HLO input."),
    llvm::cl::values(clEnumVal(proto, "Import hlo in proto binary format"),
                     clEnumVal(hlotxt, "Import hlo in hlotxt format"),
                     clEnumVal(mlir_text, "Import hlo in mlir_text format")));
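
// Illustrative invocation for a textual HLO input (file names are
// hypothetical; the flags are the ones defined in this file, and combining
// them this way is an assumption):
//   tf_tfl_translate --import-hlo --hlo-import-type=hlotxt module.hlotxt \
//       --output-mlir -o out.mlir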

// NOLINTNEXTLINE
opt<bool> enable_hlo_to_tf_conversion(
    "enable-hlo-to-tf-conversion",
    llvm::cl::desc("Whether to enable the HLO-to-TF ops conversion."),
    llvm::cl::init(false));

// NOLINTNEXTLINE
opt<bool> preserve_assert_op(
    "preserve-assert-op",
    llvm::cl::desc("Preserve AssertOp during TFL legalization."),
    llvm::cl::init(false));
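
// Usage note (an illustrative sketch, not code from this file): these globals
// are ordinary llvm::cl options, so a driver is expected to populate them via
// something like
//   llvm::cl::ParseCommandLineOptions(argc, argv, "TF-to-TFLite translator\n");
// after which the values can be read directly, e.g. `if (input_mlir) { ... }`
// or `std::string in = input_file_name;`.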