1 parent 8089054 commit 6b3f61a
core/shark_turbine/aot/builtins/jittable.py
@@ -213,12 +213,12 @@ def flat_wrapped_f(*args):
         if "functorch_functionalize" in self._passes:
             transformed_f = functorch_functionalize(transformed_f, *flat_pytorch_args)
 
-        for node in transformed_f.graph.nodes:
+        for node in transformed_f.graph.nodes:  # type: ignore
             if node.op == "call_function":
                 if node.target == torch._ops.ops.aten.lift_fresh_copy.default:
                     print(f"replaced lift_fresh_copy")
                     node.target = torch._ops.ops.aten.clone.default
-        transformed_f.recompile()
+        transformed_f.recompile()  # type: ignore
 
         # Ask dynamo to give us an aten graph.
         # TODO: Cache this for repeated calls.
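For reference, the loop touched by this change follows the standard torch.fx graph-rewrite pattern: iterate over graph.nodes, retarget matching call_function nodes, then call recompile() so the regenerated forward() reflects the mutation. Below is a minimal, self-contained sketch of that pattern; the toy module and the torch.add -> torch.sub substitution are illustrative stand-ins, not the aten.lift_fresh_copy -> aten.clone rewrite performed in this commit.

import torch
import torch.fx


class Toy(torch.nn.Module):
    def forward(self, x):
        return torch.add(x, 1)


gm = torch.fx.symbolic_trace(Toy())

# Walk the traced graph and retarget selected call_function nodes.
for node in gm.graph.nodes:
    if node.op == "call_function" and node.target is torch.add:
        node.target = torch.sub  # swap the op; args/kwargs are reused as-is

gm.recompile()  # regenerate gm.forward() from the mutated graph

x = torch.ones(3)
assert torch.equal(gm(x), x - 1)  # the rewritten graph now subtracts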