diff --git a/backends/cadence/aot/compiler.py b/backends/cadence/aot/compiler.py
index e1494f8d20..fe8fc72124 100644
--- a/backends/cadence/aot/compiler.py
+++ b/backends/cadence/aot/compiler.py
@@ -30,7 +30,6 @@
 )
 from executorch.backends.transforms.remove_clone_ops import RemoveCloneOpsTransform
 from executorch.exir import EdgeCompileConfig, EdgeProgramManager, to_edge
-from torch._export import capture_pre_autograd_graph
 from torch.ao.quantization.pt2e.export_utils import model_is_exported
 from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
 
@@ -58,7 +57,7 @@ def convert_pt2(
     """
 
     # Export with dynamo
-    model_gm = capture_pre_autograd_graph(model, inputs)
+    model_gm = torch.export.export_for_training(model, inputs).module()
 
     if model_gm_has_SDPA(model_gm):  # pyre-fixme[6]
         # Decompose SDPA
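
For context, here is a minimal standalone sketch of how the new export entry point slots into the PT2E quantization flow that this file drives. `TinyModel` and `XNNPACKQuantizer` below are illustrative stand-ins (the Cadence pipeline uses its own `CadenceQuantizer`); only the `torch.export.export_for_training(...).module()` call mirrors the change in the diff.

```python
# Illustrative sketch only: TinyModel and XNNPACKQuantizer are stand-ins,
# not part of the Cadence flow. The point is that
# torch.export.export_for_training(...).module() returns the same kind of
# pre-autograd GraphModule that capture_pre_autograd_graph used to return,
# so it can feed prepare_pt2e/convert_pt2e unchanged.
import torch
from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
from torch.ao.quantization.quantizer.xnnpack_quantizer import (
    XNNPACKQuantizer,
    get_symmetric_quantization_config,
)


class TinyModel(torch.nn.Module):
    def __init__(self) -> None:
        super().__init__()
        self.linear = torch.nn.Linear(4, 4)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.linear(x)


model = TinyModel().eval()
inputs = (torch.randn(1, 4),)

# Old API (removed by this diff):
#   from torch._export import capture_pre_autograd_graph
#   model_gm = capture_pre_autograd_graph(model, inputs)
# New API:
model_gm = torch.export.export_for_training(model, inputs).module()

# Standard PT2E flow on the exported GraphModule.
quantizer = XNNPACKQuantizer().set_global(get_symmetric_quantization_config())
prepared = prepare_pt2e(model_gm, quantizer)
prepared(*inputs)  # calibration pass with sample inputs
converted = convert_pt2e(prepared)
```

The `.module()` call is what keeps the rest of `convert_pt2` untouched: downstream passes still receive a `torch.fx.GraphModule`, so only the single export line needs to change.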