diff --git a/main/.buildinfo b/main/.buildinfo
index c9b77919432..ac64b003d6b 100644
--- a/main/.buildinfo
+++ b/main/.buildinfo
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 1549e35e62c289dc3dfba0d0ea615247
+config: 686f316830a6a9ad7caaeab3d3d10b96
 tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/main/_images/RReLU.png b/main/_images/RReLU.png
index 730cdfeca2f..e03bdabd8d2 100644
Binary files a/main/_images/RReLU.png and b/main/_images/RReLU.png differ
diff --git a/main/_images/ReduceLROnPlateau.png b/main/_images/ReduceLROnPlateau.png
index 05bc87405c2..6568cac2ccb 100644
Binary files a/main/_images/ReduceLROnPlateau.png and b/main/_images/ReduceLROnPlateau.png differ
diff --git a/main/_sources/autograd.md.txt b/main/_sources/autograd.md.txt
index e78b77e4eb4..4218eac05d7 100644
--- a/main/_sources/autograd.md.txt
+++ b/main/_sources/autograd.md.txt
@@ -423,10 +423,8 @@ Also see {ref}`saved-tensors-hooks-doc`.

 ```{eval-rst}
 .. autofunction:: torch.autograd.graph.get_gradient_edge
-```
-
-```{eval-rst}
-.. autofunction:: torch.autograd.graph.set_warn_on_accumulate_grad_stream_mismatch
+
 ```

 % This module needs to be documented. Adding here in the meantime
diff --git a/main/_sources/generated/exportdb/index.rst.txt b/main/_sources/generated/exportdb/index.rst.txt
index 835ae31c4a2..c6317c792ac 100644
--- a/main/_sources/generated/exportdb/index.rst.txt
+++ b/main/_sources/generated/exportdb/index.rst.txt
@@ -500,7 +500,7 @@ cond_closed_over_variable

 .. note::

-    Tags: :doc:`python.closure <python.closure>`, :doc:`torch.cond <torch.cond>`
+    Tags: :doc:`torch.cond <torch.cond>`, :doc:`python.closure <python.closure>`

     Support Level: SUPPORTED

@@ -629,26 +629,26 @@ Result:

     ExportedProgram:
         class GraphModule(torch.nn.Module):
-            def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
+            def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
                  #
-                sym_size_int_1: "Sym(s77)" = torch.ops.aten.sym_size.int(x, 0)
+                sym_size_int_1: "Sym(s6)" = torch.ops.aten.sym_size.int(x, 0)
-                gt: "Sym(s77 > 2)" = sym_size_int_1 > 2; sym_size_int_1 = None
+                gt: "Sym(s6 > 2)" = sym_size_int_1 > 2; sym_size_int_1 = None
                 true_graph_0 = self.true_graph_0
                 false_graph_0 = self.false_graph_0
                 cond = torch.ops.higher_order.cond(gt, true_graph_0, false_graph_0, (x, y)); gt = true_graph_0 = false_graph_0 = x = y = None
-                getitem: "f32[s77, 2]" = cond[0]; cond = None
+                getitem: "f32[s6, 2]" = cond[0]; cond = None
                 return (getitem,)

             class true_graph_0(torch.nn.Module):
-                def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
-                    add: "f32[s77, 2]" = torch.ops.aten.add.Tensor(x, y); x = y = None
+                def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
+                    add: "f32[s6, 2]" = torch.ops.aten.add.Tensor(x, y); x = y = None
                     return (add,)

             class false_graph_0(torch.nn.Module):
-                def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
-                    sub: "f32[s77, 2]" = torch.ops.aten.sub.Tensor(x, y); x = y = None
+                def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
+                    sub: "f32[s6, 2]" = torch.ops.aten.sub.Tensor(x, y); x = y = None
                     return (sub,)

     Graph signature:
@@ -659,7 +659,7 @@ Result:
         # outputs
         getitem: USER_OUTPUT

-    Range constraints: {s77: VR[0, int_oo]}
+    Range constraints: {s6: VR[0, int_oo]}

@@ -1099,7 +1099,7 @@ dynamic_shape_if_guard

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.control-flow <python.control-flow>`
+    Tags: :doc:`python.control-flow <python.control-flow>`, :doc:`torch.dynamic-shape <torch.dynamic-shape>`

     Support Level: SUPPORTED

@@ -1416,7 +1416,7 @@ list_contains

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.data-structure <python.data-structure>`, :doc:`python.assert <python.assert>`
+    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.assert <python.assert>`, :doc:`python.data-structure <python.data-structure>`

     Support Level: SUPPORTED

@@ -1765,7 +1765,22 @@ Result:

 .. code-block::

-    Unsupported: Tracing through optional input is not supported yet
+    ExportedProgram:
+        class GraphModule(torch.nn.Module):
+            def forward(self, c_dict_type_l__self______dict_____forward_____defaults___0: "f32[2, 3]", x: "f32[2, 3]"):
+                add: "f32[2, 3]" = torch.ops.aten.add.Tensor(x, c_dict_type_l__self______dict_____forward_____defaults___0); x = c_dict_type_l__self______dict_____forward_____defaults___0 = None
+                return (add,)
+
+    Graph signature:
+        # inputs
+        c_dict_type_l__self______dict_____forward_____defaults___0: CONSTANT_TENSOR target='dict_type_L__self______dict_____forward_____defaults___0' persistent=True
+        x: USER_INPUT
+
+        # outputs
+        add: USER_OUTPUT
+
+    Range constraints: {}
+

 pytree_flatten
@@ -1869,11 +1884,11 @@ Result:

     ExportedProgram:
         class GraphModule(torch.nn.Module):
-            def forward(self, x: "f32[3, s27]"):
+            def forward(self, x: "f32[3, s11]"):
                  #
-                sym_size_int_1: "Sym(s27)" = torch.ops.aten.sym_size.int(x, 1); x = None
+                sym_size_int_1: "Sym(s11)" = torch.ops.aten.sym_size.int(x, 1); x = None
-                add: "Sym(s27 + 1)" = sym_size_int_1 + 1; sym_size_int_1 = None
+                add: "Sym(s11 + 1)" = sym_size_int_1 + 1; sym_size_int_1 = None
                 return (add,)

     Graph signature:
@@ -1883,7 +1898,7 @@ Result:
         # outputs
         add: USER_OUTPUT

-    Range constraints: {s27: VR[0, int_oo]}
+    Range constraints: {s11: VR[0, int_oo]}

diff --git a/main/_sources/generated/exportdb/python.assert.rst.txt b/main/_sources/generated/exportdb/python.assert.rst.txt
index 97bddd3d7fc..f777b465a60 100644
--- a/main/_sources/generated/exportdb/python.assert.rst.txt
+++ b/main/_sources/generated/exportdb/python.assert.rst.txt
@@ -60,7 +60,7 @@ list_contains

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.data-structure <python.data-structure>`, :doc:`python.assert <python.assert>`
+    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.assert <python.assert>`, :doc:`python.data-structure <python.data-structure>`

     Support Level: SUPPORTED
diff --git a/main/_sources/generated/exportdb/python.closure.rst.txt b/main/_sources/generated/exportdb/python.closure.rst.txt
index 0039f82ba09..1f9e0755b95 100644
--- a/main/_sources/generated/exportdb/python.closure.rst.txt
+++ b/main/_sources/generated/exportdb/python.closure.rst.txt
@@ -5,7 +5,7 @@ cond_closed_over_variable

 .. note::

-    Tags: :doc:`python.closure <python.closure>`, :doc:`torch.cond <torch.cond>`
+    Tags: :doc:`torch.cond <torch.cond>`, :doc:`python.closure <python.closure>`

     Support Level: SUPPORTED
diff --git a/main/_sources/generated/exportdb/python.control-flow.rst.txt b/main/_sources/generated/exportdb/python.control-flow.rst.txt
index 42fc87d9c38..74907a8cae7 100644
--- a/main/_sources/generated/exportdb/python.control-flow.rst.txt
+++ b/main/_sources/generated/exportdb/python.control-flow.rst.txt
@@ -5,7 +5,7 @@ dynamic_shape_if_guard

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.control-flow <python.control-flow>`
+    Tags: :doc:`python.control-flow <python.control-flow>`, :doc:`torch.dynamic-shape <torch.dynamic-shape>`

     Support Level: SUPPORTED
diff --git a/main/_sources/generated/exportdb/python.data-structure.rst.txt b/main/_sources/generated/exportdb/python.data-structure.rst.txt
index a181633e2b1..19e571f2155 100644
--- a/main/_sources/generated/exportdb/python.data-structure.rst.txt
+++ b/main/_sources/generated/exportdb/python.data-structure.rst.txt
@@ -147,7 +147,7 @@ list_contains

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.data-structure <python.data-structure>`, :doc:`python.assert <python.assert>`
+    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.assert <python.assert>`, :doc:`python.data-structure <python.data-structure>`

     Support Level: SUPPORTED
diff --git a/main/_sources/generated/exportdb/python.object-model.rst.txt b/main/_sources/generated/exportdb/python.object-model.rst.txt
index 51347d2ce1a..424082aa4c0 100644
--- a/main/_sources/generated/exportdb/python.object-model.rst.txt
+++ b/main/_sources/generated/exportdb/python.object-model.rst.txt
@@ -108,4 +108,19 @@ Result:

 .. code-block::

-    Unsupported: Tracing through optional input is not supported yet
+    ExportedProgram:
+        class GraphModule(torch.nn.Module):
+            def forward(self, c_dict_type_l__self______dict_____forward_____defaults___0: "f32[2, 3]", x: "f32[2, 3]"):
+                add: "f32[2, 3]" = torch.ops.aten.add.Tensor(x, c_dict_type_l__self______dict_____forward_____defaults___0); x = c_dict_type_l__self______dict_____forward_____defaults___0 = None
+                return (add,)
+
+    Graph signature:
+        # inputs
+        c_dict_type_l__self______dict_____forward_____defaults___0: CONSTANT_TENSOR target='dict_type_L__self______dict_____forward_____defaults___0' persistent=True
+        x: USER_INPUT
+
+        # outputs
+        add: USER_OUTPUT
+
+    Range constraints: {}
+
diff --git a/main/_sources/generated/exportdb/torch.cond.rst.txt b/main/_sources/generated/exportdb/torch.cond.rst.txt
index 74dc66e7577..1fec9c364bd 100644
--- a/main/_sources/generated/exportdb/torch.cond.rst.txt
+++ b/main/_sources/generated/exportdb/torch.cond.rst.txt
@@ -270,7 +270,7 @@ cond_closed_over_variable

 .. note::

-    Tags: :doc:`python.closure <python.closure>`, :doc:`torch.cond <torch.cond>`
+    Tags: :doc:`torch.cond <torch.cond>`, :doc:`python.closure <python.closure>`

     Support Level: SUPPORTED

@@ -399,26 +399,26 @@ Result:

     ExportedProgram:
         class GraphModule(torch.nn.Module):
-            def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
+            def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
                  #
-                sym_size_int_1: "Sym(s77)" = torch.ops.aten.sym_size.int(x, 0)
+                sym_size_int_1: "Sym(s6)" = torch.ops.aten.sym_size.int(x, 0)
-                gt: "Sym(s77 > 2)" = sym_size_int_1 > 2; sym_size_int_1 = None
+                gt: "Sym(s6 > 2)" = sym_size_int_1 > 2; sym_size_int_1 = None
                 true_graph_0 = self.true_graph_0
                 false_graph_0 = self.false_graph_0
                 cond = torch.ops.higher_order.cond(gt, true_graph_0, false_graph_0, (x, y)); gt = true_graph_0 = false_graph_0 = x = y = None
-                getitem: "f32[s77, 2]" = cond[0]; cond = None
+                getitem: "f32[s6, 2]" = cond[0]; cond = None
                 return (getitem,)

             class true_graph_0(torch.nn.Module):
-                def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
-                    add: "f32[s77, 2]" = torch.ops.aten.add.Tensor(x, y); x = y = None
+                def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
+                    add: "f32[s6, 2]" = torch.ops.aten.add.Tensor(x, y); x = y = None
                     return (add,)

             class false_graph_0(torch.nn.Module):
-                def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
-                    sub: "f32[s77, 2]" = torch.ops.aten.sub.Tensor(x, y); x = y = None
+                def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
+                    sub: "f32[s6, 2]" = torch.ops.aten.sub.Tensor(x, y); x = y = None
                     return (sub,)

     Graph signature:
@@ -429,7 +429,7 @@ Result:
         # outputs
         getitem: USER_OUTPUT

-    Range constraints: {s77: VR[0, int_oo]}
+    Range constraints: {s6: VR[0, int_oo]}

diff --git a/main/_sources/generated/exportdb/torch.dynamic-shape.rst.txt b/main/_sources/generated/exportdb/torch.dynamic-shape.rst.txt
index d8ba7005452..a7bb324b31a 100644
--- a/main/_sources/generated/exportdb/torch.dynamic-shape.rst.txt
+++ b/main/_sources/generated/exportdb/torch.dynamic-shape.rst.txt
@@ -323,26 +323,26 @@ Result:

     ExportedProgram:
         class GraphModule(torch.nn.Module):
-            def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
+            def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
                  #
-                sym_size_int_1: "Sym(s77)" = torch.ops.aten.sym_size.int(x, 0)
+                sym_size_int_1: "Sym(s6)" = torch.ops.aten.sym_size.int(x, 0)
-                gt: "Sym(s77 > 2)" = sym_size_int_1 > 2; sym_size_int_1 = None
+                gt: "Sym(s6 > 2)" = sym_size_int_1 > 2; sym_size_int_1 = None
                 true_graph_0 = self.true_graph_0
                 false_graph_0 = self.false_graph_0
                 cond = torch.ops.higher_order.cond(gt, true_graph_0, false_graph_0, (x, y)); gt = true_graph_0 = false_graph_0 = x = y = None
-                getitem: "f32[s77, 2]" = cond[0]; cond = None
+                getitem: "f32[s6, 2]" = cond[0]; cond = None
                 return (getitem,)

             class true_graph_0(torch.nn.Module):
-                def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
-                    add: "f32[s77, 2]" = torch.ops.aten.add.Tensor(x, y); x = y = None
+                def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
+                    add: "f32[s6, 2]" = torch.ops.aten.add.Tensor(x, y); x = y = None
                     return (add,)

             class false_graph_0(torch.nn.Module):
-                def forward(self, x: "f32[s77, 2]", y: "f32[2]"):
-                    sub: "f32[s77, 2]" = torch.ops.aten.sub.Tensor(x, y); x = y = None
+                def forward(self, x: "f32[s6, 2]", y: "f32[2]"):
+                    sub: "f32[s6, 2]" = torch.ops.aten.sub.Tensor(x, y); x = y = None
                     return (sub,)

     Graph signature:
@@ -353,7 +353,7 @@ Result:
         # outputs
         getitem: USER_OUTPUT

-    Range constraints: {s77: VR[0, int_oo]}
+    Range constraints: {s6: VR[0, int_oo]}

@@ -478,7 +478,7 @@ dynamic_shape_if_guard

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.control-flow <python.control-flow>`
+    Tags: :doc:`python.control-flow <python.control-flow>`, :doc:`torch.dynamic-shape <torch.dynamic-shape>`

     Support Level: SUPPORTED

@@ -756,7 +756,7 @@ list_contains

 .. note::

-    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.data-structure <python.data-structure>`, :doc:`python.assert <python.assert>`
+    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.assert <python.assert>`, :doc:`python.data-structure <python.data-structure>`

     Support Level: SUPPORTED

@@ -852,11 +852,11 @@ Result:

     ExportedProgram:
         class GraphModule(torch.nn.Module):
-            def forward(self, x: "f32[3, s27]"):
+            def forward(self, x: "f32[3, s11]"):
                  #
-                sym_size_int_1: "Sym(s27)" = torch.ops.aten.sym_size.int(x, 1); x = None
+                sym_size_int_1: "Sym(s11)" = torch.ops.aten.sym_size.int(x, 1); x = None
-                add: "Sym(s27 + 1)" = sym_size_int_1 + 1; sym_size_int_1 = None
+                add: "Sym(s11 + 1)" = sym_size_int_1 + 1; sym_size_int_1 = None
                 return (add,)

     Graph signature:
@@ -866,5 +866,5 @@ Result:
         # outputs
         add: USER_OUTPUT

-    Range constraints: {s27: VR[0, int_oo]}
+    Range constraints: {s11: VR[0, int_oo]}
diff --git a/main/accelerator.html b/main/accelerator.html
index 60eb8719db8..17db274951f 100644
--- a/main/accelerator.html
+++ b/main/accelerator.html
@@ -143,7 +143,7 @@ fbq('track', 'PageView');
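Reviewer note on the recurring `s77 -> s6` and `s27 -> s11` churn above: these are freshly allocated symbolic-shape names from a rebuild of the exportdb pages, not behavioral changes. For reference, a minimal sketch of the kind of module that produces the torch.cond ExportedProgram listings in this diff, assuming `torch.cond` and `torch.export` from a recent PyTorch; this is an illustration, not the actual exportdb example source:

    # Illustrative sketch only -- the exact exportdb source is not part of this diff.
    import torch
    from torch.export import Dim, export


    class CondOnDynamicDim(torch.nn.Module):
        def forward(self, x, y):
            def true_fn(x, y):
                return x + y  # lowers to true_graph_0 (aten.add)

            def false_fn(x, y):
                return x - y  # lowers to false_graph_0 (aten.sub)

            # x.shape[0] is symbolic under export, so this predicate traces to
            # Sym(s6 > 2) and both branches are captured via higher_order.cond.
            return torch.cond(x.shape[0] > 2, true_fn, false_fn, (x, y))


    # Marking dim 0 of x as dynamic introduces the sN symbol whose numbering
    # (s77 before, s6 after) differs between the two doc builds.
    ep = export(
        CondOnDynamicDim(),
        (torch.randn(4, 2), torch.randn(2)),
        dynamic_shapes={"x": {0: Dim("batch")}, "y": None},
    )
    print(ep)

Printing `ep` yields a GraphModule with `true_graph_0`/`false_graph_0` submodules and a range constraint on the batch symbol, matching the shape of the listings above up to symbol numbering.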