   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git210259b ) ▼</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+gitd7ab600 ) ▼</a>
     </div>

|
@@ -393,7 +393,6 @@ Source code for torch._tensor
 from numbers import Number
 from typing import Any, Dict, Optional, Tuple, Union
 import warnings
-import weakref

 import torch
 import torch._C as _C
|
@@ -746,33 +745,6 @@ Source code for torch._tensor
     have forward mode AD gradients.
     """)

-    def retain_grad(self):
-        r"""Enables .grad attribute for non-leaf Tensors."""
-        if has_torch_function_unary(self):
-            return handle_torch_function(Tensor.retain_grad, (self,), self)
-        if not self.requires_grad:
-            raise RuntimeError("can't retain_grad on Tensor that has requires_grad=False")
-        if self.is_leaf:  # no-op for leaves
-            return
-        if hasattr(self, 'retains_grad'):
-            return
-        weak_self = weakref.ref(self)
-
-        def retain_grad_hook(grad):
-            var = weak_self()
-            if var is None:
-                return
-            if var._grad is None:
-                if grad.is_sparse:
-                    var._grad = grad.clone()
-                else:
-                    var._grad = grad.clone(memory_format=torch.contiguous_format)
-            else:
-                var._grad = var._grad + grad
-
-        self.register_hook(retain_grad_hook)
-        self.retains_grad = True
-
     def is_shared(self):
         r"""Checks if tensor is in shared memory.
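For reference, a minimal usage sketch of Tensor.retain_grad as defined above (tensor names here are illustrative, not from the source): calling it on a non-leaf tensor makes autograd populate that tensor's .grad during backward(), which otherwise happens only for leaf tensors.

import torch

# Illustrative tensors; retain_grad() asks autograd to keep y's gradient
# even though y is not a leaf of the graph.
x = torch.ones(3, requires_grad=True)    # leaf tensor
y = x * 2                                # non-leaf result of an op
y.retain_grad()                          # without this, y.grad would stay None
y.sum().backward()
print(y.grad)                            # tensor([1., 1., 1.])
print(x.grad)                            # tensor([2., 2., 2.])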
|
@@ -1386,12 +1358,6 @@ Source code for torch._tensor
             # TODO mypy doesn't support @property, see: https://github.com/python/mypy/issues/6185
             return handle_torch_function(Tensor.grad.__get__, (self,), self)  # type: ignore[attr-defined]

-        if self.requires_grad and not hasattr(self, "retains_grad") and not self.is_leaf and self._grad is None:
-            warnings.warn("The .grad attribute of a Tensor that is not a leaf Tensor is being accessed. Its .grad "
-                          "attribute won't be populated during autograd.backward(). If you indeed want the gradient "
-                          "for a non-leaf Tensor, use .retain_grad() on the non-leaf Tensor. If you access the "
-                          "non-leaf Tensor by mistake, make sure you access the leaf Tensor instead. See "
-                          "github.com/pytorch/pytorch/pull/30531 for more information.", stacklevel=2)
         return self._grad

     @grad.setter
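A short sketch of the case that warning covers (variable names are illustrative): reading .grad on a non-leaf tensor that never called retain_grad() returns None after backward(), and this version of the getter warns when that access happens.

import torch

x = torch.ones(3, requires_grad=True)
y = x * 2                     # non-leaf tensor
y.sum().backward()
print(y.grad)                 # None -- the gradient was not retained, and
                              # accessing y.grad triggers the warning shown above
print(x.grad)                 # tensor([2., 2., 2.]); leaf gradients are populated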
|
|