192 192  <div class="pytorch-left-menu-search">
193 193
194 194    <div class="version">
195      -    <a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git7fce0bb ) ▼</a>
    195  +    <a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git145a20b ) ▼</a>
196 196    </div>
197 197
198 198
@@ -526,6 +526,16 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
526 526              str(self.device),
527 527              self.requires_grad)
528 528          return (torch._utils._rebuild_mlc_tensor, arg_mlc)
    529  +       if self.device.type == 'meta':
    530  +           # NB: This implementation BREAKS storage sharing.  Current
    531  +           # hypothesis is that no one cares for meta tensors.
    532  +           arg_meta = (
    533  +               self.dtype,
    534  +               tuple(self.size()),
    535  +               self.stride(),
    536  +               self.requires_grad,
    537  +           )
    538  +           return (torch._utils._rebuild_meta_tensor_no_storage, arg_meta)
529 539          if self.is_quantized:
530 540              # quantizer_params can be different type based on torch attribute
531 541              quantizer_params: Union[Tuple[torch.qscheme, float, int], Tuple[Any, Tensor, Tensor, int]]
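The new `meta` branch above makes meta tensors picklable by reducing them to a plain metadata tuple. A minimal sketch of the effect, assuming a build that includes this change (names and shapes here are illustrative):

    import pickle
    import torch

    # Meta tensors carry only metadata (dtype, shape, strides) and no real
    # storage, which is why __reduce_ex__ can rebuild them from a plain tuple.
    t = torch.empty(3, 4, device='meta', requires_grad=True)

    # The round trip goes through torch._utils._rebuild_meta_tensor_no_storage,
    # preserving dtype, size, stride and requires_grad; as the NB comment
    # warns, storage sharing between views is NOT preserved.
    u = pickle.loads(pickle.dumps(t))
    assert u.device.type == 'meta'
    assert u.shape == (3, 4) and u.dtype == t.dtype and u.requires_grad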
@@ -595,7 +605,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
595 605          # All strings are unicode in Python 3.
596 606          return torch._tensor_str._str(self)
597 607
598      -  <div class="viewcode-block" id="Tensor.backward"><a class="viewcode-back" href="../../generated/torch.Tensor.backward.html#torch.Tensor.backward">[docs]</a>    def backward(self, gradient=None, retain_graph=None, create_graph=False, inputs=None):
    608  +      def backward(self, gradient=None, retain_graph=None, create_graph=False, inputs=None):
599 609          r"""Computes the gradient of current tensor w.r.t. graph leaves.
600 610
601 611          The graph is differentiated using the chain rule. If the tensor is
@@ -651,7 +661,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
651 661                  retain_graph=retain_graph,
652 662                  create_graph=create_graph,
653 663                  inputs=inputs)
654      -          torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)</div>
    664  +        torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)
655 665
656 666      def register_hook(self, hook):
657 667          r"""Registers a backward hook.
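For context on the last hunk: `Tensor.backward` is a thin wrapper that forwards its arguments to `torch.autograd.backward`, and the `inputs` argument restricts which leaves accumulate gradients. A small usage sketch (the tensors are illustrative):

    import torch

    x = torch.ones(2, requires_grad=True)
    y = (3 * x).sum()

    # Equivalent to torch.autograd.backward(y, inputs=[x]); gradients are
    # accumulated only into the listed leaf tensors.
    y.backward(inputs=[x])
    print(x.grad)  # tensor([3., 3.])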
|