# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-strict

import logging
from pathlib import Path
from typing import Callable, cast, Optional

import executorch.backends.cadence.aot.ops_registrations  # noqa
import torch
from executorch.backends.cadence.aot.quantizer.fusion_pass import QuantFusion
from executorch.backends.cadence.aot.quantizer.quantizer import CadenceQuantizer

from executorch.backends.cadence.aot.replace_ops import ReplaceSafeSoftmaxWithSoftmax
from executorch.backends.cadence.aot.utils import model_gm_has_SDPA, model_is_quantized
from executorch.backends.transforms.decompose_sdpa import (
    DecomposeScaledDotProductAttention,
)
from executorch.devtools import generate_etrecord
from executorch.exir import (
    EdgeCompileConfig,
    EdgeProgramManager,
    ExecutorchProgramManager,
    to_edge,
)
from executorch.exir.pass_base import PassResult
from torch.ao.quantization.pt2e.export_utils import model_is_exported
from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e

from torch.export import export
from torch.export.exported_program import ExportedProgram

from .passes import get_cadence_passes

from .utils import print_ops_info


# Note: this is not meant as a primary API, since it can create inconsistencies
# if the quantizer used here differs from the quantizer later used to fuse. It
# is, however, useful for unit tests that need to keep the converted model
# separate from the fused model in order to obtain reference numerics.
# If this does not apply, please use quantize_pt2 instead.
def convert_pt2(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    quantizer: CadenceQuantizer,
) -> torch.fx.GraphModule:
    """
    Prepare and convert a model using the given quantizer.
    The quantizer must be supplied and must be the same as the one used to
    fuse the model later, if applicable. If you do not need that behavior,
    please use quantize_pt2 instead, which will instantiate a default
    quantizer for you if needed.
    Returns a GraphModule with the converted model.
    """

    # Export with dynamo
    model_gm = torch.export.export_for_training(model, inputs).module()

    if model_gm_has_SDPA(model_gm):  # pyre-fixme[6]
        # Decompose SDPA
        DecomposeScaledDotProductAttention(False)(model_gm)  # pyre-fixme[6]

        # Swap _safe_softmax with _softmax (see https://github.com/pytorch/pytorch/pull/133882
        # for details).
        result = ReplaceSafeSoftmaxWithSoftmax()(model_gm)  # pyre-fixme[6]
        assert result is not None
        model_gm = result.graph_module

    # Prepare
    prepared_model = prepare_pt2e(model_gm, quantizer)

    # Calibrate
    prepared_model(*inputs)

    # Convert
    converted_model = convert_pt2e(prepared_model)

    return converted_model

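# Example usage of convert_pt2 (a minimal sketch of the unit-test flow described
# above; `MyModel` and the input shape are placeholders, not part of this module):
#
#   quantizer = CadenceQuantizer()
#   example_inputs = (torch.randn(1, 16),)
#   converted_gm = convert_pt2(MyModel().eval(), example_inputs, quantizer)
#   reference_out = converted_gm(*example_inputs)  # reference numerics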

# Note: this is not meant as a primary API, since it can create inconsistencies
# if the quantizer used here differs from the quantizer used to convert. It is,
# however, useful for unit tests that need to keep the converted model separate
# from the fused model in order to obtain reference numerics.
# If this does not apply, please use quantize_pt2 instead.
def fuse_pt2(
    converted_graph_module: torch.fx.GraphModule,
    quantizer: CadenceQuantizer,
) -> torch.fx.GraphModule:
    """
    Fuse a converted graph module using the given quantizer.
    The quantizer must be the same as the one used to convert the model.
    If you do not need that behavior, please use quantize_pt2 instead,
    which will instantiate a default quantizer for you if needed.
    Returns a GraphModule with the fused model.
    """
    # Get patterns and apply fusion of dq -> op -> q to qop
    # pyre-ignore[16]: no attribute
    patterns = [q.pattern for q in quantizer.quantizers]
    QuantFusion(patterns)(converted_graph_module)

    return converted_graph_module

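# Example usage of fuse_pt2 (a minimal sketch continuing the convert_pt2 example
# above; the quantizer must be the same instance that was used to convert):
#
#   fused_gm = fuse_pt2(converted_gm, quantizer)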

# Note: this is the one-liner API to quantize and fuse a model.
def quantize_pt2(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    quantizer: Optional[CadenceQuantizer] = None,
) -> torch.fx.GraphModule:
    """
    Prepare, convert and fuse the model using the given quantizer.
    Returns a GraphModule with the quantized model.
    """
    # Quantizer
    if not quantizer:
        quantizer = CadenceQuantizer()

    # Get converted graph module
    converted_gm = convert_pt2(model, inputs, quantizer)

    # Get fused model
    fused_gm = fuse_pt2(converted_gm, quantizer)

    return fused_gm

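# Example usage of quantize_pt2 (a minimal sketch; `MyModel` and the inputs are
# placeholders, and the default CadenceQuantizer is used):
#
#   quantized_gm = quantize_pt2(MyModel().eval(), (torch.randn(1, 16),))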

# Export the model and lower it to an ExportedProgram (in ATen IR).
def export_program(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    dump_graphs: bool = False,
) -> ExportedProgram:
    assert isinstance(model, torch.nn.Module), "model should be an nn.Module"

    # We don't support training mode. Make the model inference mode by
    # calling model.eval() or an equivalent call for quantized models.
    # GraphModules cannot call eval(), so we skip them.
    if not isinstance(model, torch.fx.GraphModule):
        if hasattr(model, "eval"):
            model.eval()
    else:
        # If the model is quantized, call the suggested torch.ao.quantization API,
        # which only handles dropout and batchnorm.
        if model_is_quantized(model):
            torch.ao.quantization.move_exported_model_to_eval(model)
        else:
            # If we get a GraphModule which is _not_ quantized, then it should already
            # have been exported.
            assert model_is_exported(model), "model should be from an ExportedProgram"

    # Prevent mkldnn decompositions
    torch._C._set_mkldnn_enabled(False)

    # Capture the model and return it.
    expo_program = export(model, inputs)

    if dump_graphs:
        logging.info("Exported graph:")
        expo_program.graph_module.graph.print_tabular()

    return expo_program


# Export the model and lower it to an EdgeProgramManager (in edge IR).
def export_to_edge(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    dump_graphs: bool = False,
) -> EdgeProgramManager:
    assert isinstance(model, torch.nn.Module), "model should be an nn.Module"

    # Export the model into an ExportedProgram.
    expo_program = export_program(model, inputs, dump_graphs=dump_graphs)

    # Call to_edge to convert the graph to edge IR.
    # Note: dim_order is skipped (https://github.com/pytorch/executorch/issues/3704)
    edge_prog_manager = to_edge(
        expo_program,
        compile_config=EdgeCompileConfig(
            _check_ir_validity=False, _skip_dim_order=True
        ),
    )

    if dump_graphs:
        logging.info("Edge graph:")
        edge_prog_manager.exported_program().graph_module.graph.print_tabular()

    return edge_prog_manager

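# Example usage of export_to_edge (a minimal sketch; `quantized_gm` is the
# placeholder quantized module from the quantize_pt2 sketch above):
#
#   edge_manager = export_to_edge(quantized_gm, (torch.randn(1, 16),), dump_graphs=True)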

def export_to_cadence(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    dump_graphs: bool = False,
    output_dir: Optional[str] = None,
    opt_level: int = 1,
) -> EdgeProgramManager:
    edge_prog_manager = export_to_edge(model, inputs, dump_graphs=dump_graphs)
    cadence_passes = get_cadence_passes(opt_level)

    # Run a couple of required passes for quant/dequant ops
    cadence_prog_manager = edge_prog_manager.transform(
        cast(
            list[Callable[[torch.fx.GraphModule], Optional[PassResult]]], cadence_passes
        )
    )
    return cadence_prog_manager


def quantize_and_export_to_cadence(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    dump_graphs: bool = False,
    opt_level: int = 1,
) -> EdgeProgramManager:
    quantized_model = quantize_pt2(model, inputs)

    return export_to_cadence(
        quantized_model,
        inputs,
        opt_level=opt_level,
        dump_graphs=dump_graphs,
    )

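# Example usage of quantize_and_export_to_cadence (a minimal sketch; `MyModel`
# and the inputs are placeholders):
#
#   cadence_manager = quantize_and_export_to_cadence(
#       MyModel().eval(), (torch.randn(1, 16),), opt_level=1
#   )
#   exec_manager = cadence_manager.to_executorch()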

# Export the model and lower it to an EdgeProgramManager (in edge IR), apply
# passes specific to Cadence DSP execution, and lower the result to an
# ExecutorchProgramManager. The op-level differences between the edge and
# Cadence graphs are printed along the way.
def export_to_executorch_gen_etrecord(
    model: torch.nn.Module,
    inputs: tuple[object, ...],
    dump_graphs: bool = False,
    output_dir: Optional[str] = None,
    opt_level: int = 1,
) -> ExecutorchProgramManager:
    edge_prog_manager = export_to_edge(model, inputs, dump_graphs=dump_graphs)
    cadence_passes = get_cadence_passes(opt_level)

    # Run a couple of required passes for quant/dequant ops
    cadence_prog_manager = edge_prog_manager.transform(
        cast(
            list[Callable[[torch.fx.GraphModule], Optional[PassResult]]], cadence_passes
        )
    )

    # Print some information to the terminal
    print_ops_info(
        edge_prog_manager.exported_program().graph_module,
        cadence_prog_manager.exported_program().graph_module,
    )

    # Get the executorch program after the Cadence-specific passes
    exec_prog: ExecutorchProgramManager = cadence_prog_manager.to_executorch()
    if output_dir:
        _gen_etrecord(edge_prog_manager, exec_prog, Path(output_dir))
    else:
        logging.warning("No output directory provided, skipping ETRecord generation")

    return exec_prog

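# Example usage of export_to_executorch_gen_etrecord (a minimal sketch; the
# quantized module, inputs, output path, and .pte file name are placeholders;
# ExecutorchProgramManager.buffer holds the serialized program bytes):
#
#   exec_manager = export_to_executorch_gen_etrecord(
#       quantized_gm, (torch.randn(1, 16),), output_dir="/tmp/cadence_out"
#   )
#   with open("/tmp/cadence_out/model.pte", "wb") as f:
#       f.write(exec_manager.buffer)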

def _gen_etrecord(
    edge_program: EdgeProgramManager,
    et_program: ExecutorchProgramManager,
    output_dir: Path,
) -> None:
    etrec_path = output_dir / "etrecord.bin"
    try:
        generate_etrecord(
            et_record=etrec_path,
            edge_dialect_program=edge_program,
            executorch_program=et_program,
        )
        logging.info(f"Generated ETRecord at {etrec_path}")
    except Exception:
        # Any errors here shouldn't block the rest of the flow
        logging.exception("Encountered exception while generating ETRecord")