diff --git a/core/shark_turbine/aot/builtins/jittable.py b/core/shark_turbine/aot/builtins/jittable.py
index 2d774ee70..d14834386 100644
--- a/core/shark_turbine/aot/builtins/jittable.py
+++ b/core/shark_turbine/aot/builtins/jittable.py
@@ -213,12 +213,12 @@ def flat_wrapped_f(*args):
         if "functorch_functionalize" in self._passes:
             transformed_f = functorch_functionalize(transformed_f, *flat_pytorch_args)
 
-        for node in transformed_f.graph.nodes: # type: ignore
+        for node in transformed_f.graph.nodes:  # type: ignore
             if node.op == "call_function":
                 if node.target == torch._ops.ops.aten.lift_fresh_copy.default:
                     print(f"replaced lift_fresh_copy")
                     node.target = torch._ops.ops.aten.clone.default
-        transformed_f.recompile() # type: ignore
+        transformed_f.recompile()  # type: ignore
 
         # Ask dynamo to give us an aten graph.
         # TODO: Cache this for repeated calls.
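
For context, the hunk above uses the standard torch.fx graph-rewrite pattern: walk `graph.nodes`, retarget matching `call_function` nodes in place, then call `recompile()` so the generated `forward` reflects the mutated graph. Below is a minimal, self-contained sketch of that same pattern; the `Toy` module and the `operator.add` -> `operator.mul` substitution are illustrative stand-ins for the `aten.lift_fresh_copy` -> `aten.clone` replacement in the diff, not code from this PR.

import operator

import torch
import torch.fx as fx


class Toy(torch.nn.Module):
    def forward(self, x, y):
        return x + y


# symbolic_trace records `x + y` as a call_function node targeting operator.add.
gm = fx.symbolic_trace(Toy())

for node in gm.graph.nodes:
    if node.op == "call_function" and node.target is operator.add:
        node.target = operator.mul  # retarget the node in place

gm.recompile()  # regenerate gm.forward from the mutated graph

x, y = torch.tensor(2.0), torch.tensor(3.0)
print(gm(x, y))  # tensor(6.) -- the graph now multiplies instead of adding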