python.closure
==================

cond_closed_over_variable
^^^^^^^^^^^^^^^^^^^^^^^^^

.. note::

    Tags: :doc:`torch.cond`, :doc:`python.closure`

    Support Level: SUPPORTED

Original source code:

.. code-block:: python

    import torch

    from functorch.experimental.control_flow import cond


    class CondClosedOverVariable(torch.nn.Module):
        """
        torch.cond() supports branches closed over arbitrary variables.
        """

        def forward(self, pred, x):
            def true_fn(val):
                return x * 2

            def false_fn(val):
                return x - 2

            return cond(pred, true_fn, false_fn, [x + 1])

Result:

.. code-block::

    ExportedProgram:
        class GraphModule(torch.nn.Module):
            def forward(self, l_pred_: "b8[]", l_x_: "f32[3, 2]"):
                true_graph_0 = self.true_graph_0
                false_graph_0 = self.false_graph_0
                conditional = torch.ops.higher_order.cond(l_pred_, true_graph_0, false_graph_0, [l_x_]);  l_pred_ = true_graph_0 = false_graph_0 = l_x_ = None
                getitem: "f32[3, 2]" = conditional[0];  conditional = None
                return (getitem,)

            class <lambda>(torch.nn.Module):
                def forward(self, arg0_1: "f32[3, 2]"):
                    mul: "f32[3, 2]" = torch.ops.aten.mul.Tensor(arg0_1, 2);  arg0_1 = None
                    return (mul,)

            class <lambda>(torch.nn.Module):
                def forward(self, arg0_1: "f32[3, 2]"):
                    sub: "f32[3, 2]" = torch.ops.aten.sub.Tensor(arg0_1, 2);  arg0_1 = None
                    return (sub,)

    Graph signature: ExportGraphSignature(input_specs=[InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_pred_'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_x_'), target=None)], output_specs=[OutputSpec(kind=<OutputKind.USER_OUTPUT: 1>, arg=TensorArgument(name='getitem'), target=None)])
    Range constraints: {}
    Equality constraints: []


nested_function
^^^^^^^^^^^^^^^

.. note::

    Tags: :doc:`python.closure`

    Support Level: SUPPORTED

Original source code:

.. code-block:: python

    import torch


    def nested_function(a, b):
        """
        Nested functions are traced through. Side effects on global captures are not supported though.
        """
        x = a + b
        z = a - b

        def closure(y):
            nonlocal x
            x += 1
            return x * y + z

        return closure(x)

Result:

.. code-block::

    ExportedProgram:
        class GraphModule(torch.nn.Module):
            def forward(self, l_a_: "f32[3, 2]", l_b_: "f32[2]"):
                add: "f32[3, 2]" = torch.ops.aten.add.Tensor(l_a_, l_b_)
                sub: "f32[3, 2]" = torch.ops.aten.sub.Tensor(l_a_, l_b_);  l_a_ = l_b_ = None
                add_1: "f32[3, 2]" = torch.ops.aten.add.Tensor(add, 1);  add = None
                mul: "f32[3, 2]" = torch.ops.aten.mul.Tensor(add_1, add_1);  add_1 = None
                add_2: "f32[3, 2]" = torch.ops.aten.add.Tensor(mul, sub);  mul = sub = None
                return (add_2,)

    Graph signature: ExportGraphSignature(input_specs=[InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_a_'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_b_'), target=None)], output_specs=[OutputSpec(kind=<OutputKind.USER_OUTPUT: 1>, arg=TensorArgument(name='add_2'), target=None)])
    Range constraints: {}
    Equality constraints: []
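
For reference, the two programs above can be reproduced outside of ExportDB. The sketch below is illustrative only: it assumes a PyTorch build that provides ``torch.export.export`` together with ``functorch.experimental.control_flow.cond``, and it wraps ``nested_function`` in a hypothetical ``NestedFunction`` module so that both examples can be exported the same way.

.. code-block:: python

    import torch
    from functorch.experimental.control_flow import cond


    class CondClosedOverVariable(torch.nn.Module):
        """torch.cond() supports branches closed over arbitrary variables."""

        def forward(self, pred, x):
            def true_fn(val):
                return x * 2

            def false_fn(val):
                return x - 2

            return cond(pred, true_fn, false_fn, [x + 1])


    class NestedFunction(torch.nn.Module):
        """Hypothetical nn.Module wrapper around the nested_function example."""

        def forward(self, a, b):
            x = a + b
            z = a - b

            def closure(y):
                nonlocal x
                x += 1
                return x * y + z

            return closure(x)


    pred = torch.tensor(True)  # b8[] scalar predicate
    x = torch.randn(3, 2)      # f32[3, 2]

    # Export the cond example; both branches are captured as nested graph modules.
    ep_cond = torch.export.export(CondClosedOverVariable(), (pred, x))
    print(ep_cond)

    # Export the closure example; the nested function is traced through and
    # inlined into a single flat graph.
    ep_nested = torch.export.export(NestedFunction(), (torch.randn(3, 2), torch.randn(2)))
    print(ep_nested)

Printing the resulting ``ExportedProgram`` objects should match the structure shown above: the ``cond`` branches appear as nested graph modules, while the closure inside ``nested_function`` is inlined into a single flat graph.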