Skip to content

Commit faccceb

Browse files
committed
Generate Python docs from pytorch/pytorch@c99e75c
1 parent 19c3380 commit faccceb

File tree

1,750 files changed

+15505
-3281
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

1,750 files changed

+15505
-3281
lines changed

docs/master/__config__.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -194,7 +194,7 @@
194194
<div class="pytorch-left-menu-search">
195195

196196
<div class="version">
197-
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitaab14ff ) &#x25BC</a>
197+
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitc99e75c ) &#x25BC</a>
198198
</div>
199199

200200

docs/master/_modules/index.html

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@
193193
<div class="pytorch-left-menu-search">
194194

195195
<div class="version">
196-
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitaab14ff ) &#x25BC</a>
196+
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitc99e75c ) &#x25BC</a>
197197
</div>
198198

199199

@@ -404,6 +404,7 @@ <h1>All modules for which code is available</h1>
404404
<li><a href="torch/_tensor_str.html">torch._tensor_str</a></li>
405405
<li><a href="torch/_utils.html">torch._utils</a></li>
406406
<li><a href="torch/_vmap_internals.html">torch._vmap_internals</a></li>
407+
<li><a href="torch/ao/quantization/quantize.html">torch.ao.quantization.quantize</a></li>
407408
<li><a href="torch/autocast_mode.html">torch.autocast_mode</a></li>
408409
<li><a href="torch/autograd.html">torch.autograd</a></li>
409410
<ul><li><a href="torch/autograd/anomaly_mode.html">torch.autograd.anomaly_mode</a></li>
@@ -421,12 +422,14 @@ <h1>All modules for which code is available</h1>
421422
<li><a href="torch/cuda.html">torch.cuda</a></li>
422423
<ul><li><a href="torch/cuda/amp/autocast_mode.html">torch.cuda.amp.autocast_mode</a></li>
423424
<li><a href="torch/cuda/amp/grad_scaler.html">torch.cuda.amp.grad_scaler</a></li>
425+
<li><a href="torch/cuda/graphs.html">torch.cuda.graphs</a></li>
424426
<li><a href="torch/cuda/memory.html">torch.cuda.memory</a></li>
425427
<li><a href="torch/cuda/nvtx.html">torch.cuda.nvtx</a></li>
426428
<li><a href="torch/cuda/random.html">torch.cuda.random</a></li>
427429
<li><a href="torch/cuda/streams.html">torch.cuda.streams</a></li>
428430
</ul><li><a href="torch/distributed.html">torch.distributed</a></li>
429-
<ul><li><a href="torch/distributed/algorithms/ddp_comm_hooks/default_hooks.html">torch.distributed.algorithms.ddp_comm_hooks.default_hooks</a></li>
431+
<ul><li><a href="torch/distributed/algorithms/ddp_comm_hooks/debugging_hooks.html">torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks</a></li>
432+
<li><a href="torch/distributed/algorithms/ddp_comm_hooks/default_hooks.html">torch.distributed.algorithms.ddp_comm_hooks.default_hooks</a></li>
430433
<li><a href="torch/distributed/algorithms/ddp_comm_hooks/powerSGD_hook.html">torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook</a></li>
431434
<li><a href="torch/distributed/algorithms/join.html">torch.distributed.algorithms.join</a></li>
432435
<li><a href="torch/distributed/autograd.html">torch.distributed.autograd</a></li>
@@ -603,14 +606,14 @@ <h1>All modules for which code is available</h1>
603606
<li><a href="torch/quantization/fuse_modules.html">torch.quantization.fuse_modules</a></li>
604607
<li><a href="torch/quantization/observer.html">torch.quantization.observer</a></li>
605608
<li><a href="torch/quantization/qconfig.html">torch.quantization.qconfig</a></li>
606-
<li><a href="torch/quantization/quantize.html">torch.quantization.quantize</a></li>
607609
<li><a href="torch/quantization/stubs.html">torch.quantization.stubs</a></li>
608610
</ul><li><a href="torch/quasirandom.html">torch.quasirandom</a></li>
609611
<li><a href="torch/random.html">torch.random</a></li>
610612
<li><a href="torch/serialization.html">torch.serialization</a></li>
611613
<li><a href="torch/sparse.html">torch.sparse</a></li>
612614
<li><a href="torch/storage.html">torch.storage</a></li>
613615
<li><a href="torch/testing/_asserts.html">torch.testing._asserts</a></li>
616+
<li><a href="torch/testing/_creation.html">torch.testing._creation</a></li>
614617
<li><a href="torch/utils/benchmark/utils/common.html">torch.utils.benchmark.utils.common</a></li>
615618
<li><a href="torch/utils/benchmark/utils/timer.html">torch.utils.benchmark.utils.timer</a></li>
616619
<li><a href="torch/utils/benchmark/utils/valgrind_wrapper/timer_interface.html">torch.utils.benchmark.utils.valgrind_wrapper.timer_interface</a></li>

docs/master/_modules/torch.html

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@
193193
<div class="pytorch-left-menu-search">
194194

195195
<div class="version">
196-
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitaab14ff ) &#x25BC</a>
196+
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitc99e75c ) &#x25BC</a>
197197
</div>
198198

199199

@@ -688,7 +688,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
688688
<span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span>
689689

690690

691-
<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
691+
<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
692692
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Sets the default ``torch.Tensor`` type to floating point tensor type</span>
693693
<span class="sd"> ``t``. This type will also be used as default floating point type for</span>
694694
<span class="sd"> type inference in :func:`torch.tensor`.</span>
@@ -709,10 +709,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
709709
<span class="sd"> &quot;&quot;&quot;</span>
710710
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
711711
<span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
712-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>
712+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
713713

714714

715-
<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
715+
<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
716716
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
717717

718718
<span class="sd"> Sets the default floating point dtype to :attr:`d`. Supports torch.float32</span>
@@ -755,9 +755,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
755755
<span class="sd"> torch.complex128</span>
756756

757757
<span class="sd"> &quot;&quot;&quot;</span>
758-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
758+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
759759

760-
<span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">):</span>
760+
<div class="viewcode-block" id="use_deterministic_algorithms"><a class="viewcode-back" href="../generated/torch.use_deterministic_algorithms.html#torch.use_deterministic_algorithms">[docs]</a><span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">):</span>
761761
<span class="sa">r</span><span class="sd">&quot;&quot;&quot; Sets whether PyTorch operations must use &quot;deterministic&quot;</span>
762762
<span class="sd"> algorithms. That is, algorithms which, given the same input, and when</span>
763763
<span class="sd"> run on the same software and hardware, always produce the same output.</span>
@@ -872,15 +872,15 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
872872
<span class="sd"> ...</span>
873873
<span class="sd"> RuntimeError: index_add_cuda_ does not have a deterministic implementation...</span>
874874
<span class="sd"> &quot;&quot;&quot;</span>
875-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">)</span>
875+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">)</span></div>
876876

877-
<div class="viewcode-block" id="are_deterministic_algorithms_enabled"><a class="viewcode-back" href="../generated/torch.are_deterministic_algorithms_enabled.html#torch.are_deterministic_algorithms_enabled">[docs]</a><span class="k">def</span> <span class="nf">are_deterministic_algorithms_enabled</span><span class="p">():</span>
877+
<span class="k">def</span> <span class="nf">are_deterministic_algorithms_enabled</span><span class="p">():</span>
878878
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if the global deterministic flag is turned on. Refer to</span>
879879
<span class="sd"> :func:`torch.use_deterministic_algorithms` documentation for more details.</span>
880880
<span class="sd"> &quot;&quot;&quot;</span>
881-
<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_deterministic_algorithms</span><span class="p">()</span></div>
881+
<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_deterministic_algorithms</span><span class="p">()</span>
882882

883-
<div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a><span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
883+
<span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
884884
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;When this flag is False (default) then some PyTorch warnings may only</span>
885885
<span class="sd"> appear once per process. This helps avoid excessive warning information.</span>
886886
<span class="sd"> Setting it to True causes these warnings to always appear, which may be</span>
@@ -890,7 +890,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
890890
<span class="sd"> b (:class:`bool`): If True, force warnings to always be emitted</span>
891891
<span class="sd"> If False, set to the default behaviour</span>
892892
<span class="sd"> &quot;&quot;&quot;</span>
893-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span></div>
893+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span>
894894

895895
<span class="k">def</span> <span class="nf">is_warn_always_enabled</span><span class="p">():</span>
896896
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if the global warn_always flag is turned on. Refer to</span>
@@ -1051,14 +1051,14 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
10511051
<span class="c1">################################################################################</span>
10521052

10531053
<span class="c1"># needs to be before the submodule imports to avoid circular dependencies</span>
1054-
<div class="viewcode-block" id="_assert"><a class="viewcode-back" href="../generated/torch._assert.html#torch._assert">[docs]</a><span class="k">def</span> <span class="nf">_assert</span><span class="p">(</span><span class="n">condition</span><span class="p">,</span> <span class="n">message</span><span class="p">):</span>
1054+
<span class="k">def</span> <span class="nf">_assert</span><span class="p">(</span><span class="n">condition</span><span class="p">,</span> <span class="n">message</span><span class="p">):</span>
10551055
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;A wrapper around Python&#39;s assert which is symbolically traceable.</span>
10561056
<span class="sd"> &quot;&quot;&quot;</span>
10571057
<span class="kn">from</span> <span class="nn">.overrides</span> <span class="kn">import</span> <span class="n">has_torch_function</span><span class="p">,</span> <span class="n">handle_torch_function</span>
10581058

10591059
<span class="k">if</span> <span class="nb">type</span><span class="p">(</span><span class="n">condition</span><span class="p">)</span> <span class="ow">is</span> <span class="ow">not</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span> <span class="ow">and</span> <span class="n">has_torch_function</span><span class="p">((</span><span class="n">condition</span><span class="p">,)):</span>
10601060
<span class="k">return</span> <span class="n">handle_torch_function</span><span class="p">(</span><span class="n">_assert</span><span class="p">,</span> <span class="p">(</span><span class="n">condition</span><span class="p">,),</span> <span class="n">condition</span><span class="p">,</span> <span class="n">message</span><span class="p">)</span>
1061-
<span class="k">assert</span> <span class="n">condition</span><span class="p">,</span> <span class="n">message</span></div>
1061+
<span class="k">assert</span> <span class="n">condition</span><span class="p">,</span> <span class="n">message</span>
10621062

10631063
<span class="c1">################################################################################</span>
10641064
<span class="c1"># Import most common subpackages</span>
@@ -1115,9 +1115,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
11151115
<span class="k">del</span> <span class="n">_torch_docs</span><span class="p">,</span> <span class="n">_tensor_docs</span><span class="p">,</span> <span class="n">_storage_docs</span>
11161116

11171117

1118-
<span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
1118+
<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a><span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
11191119
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1&quot;&quot;&quot;</span>
1120-
<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span>
1120+
<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span></div>
11211121

11221122

11231123
<span class="c1"># Import the ops &quot;namespace&quot;</span>

docs/master/_modules/torch/__config__.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@
193193
<div class="pytorch-left-menu-search">
194194

195195
<div class="version">
196-
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitaab14ff ) &#x25BC</a>
196+
<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitc99e75c ) &#x25BC</a>
197197
</div>
198198

199199

0 commit comments

Comments (0)